Merge branch 'gerrit/goldfish' into 'master'

Change-Id: Ic8b85f85eb80fcfd046ebfca721de09c0ad5c753
diff --git a/asterixdb/LICENSE b/asterixdb/LICENSE
index 4c63d01..72413f8 100644
--- a/asterixdb/LICENSE
+++ b/asterixdb/LICENSE
@@ -517,6 +517,202 @@
    limitations under the License.
 ---
 
+   Portions of the AsterixDB OM
+       located at:
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/JtsModule.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiLineStringParser.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PointParser.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPointParser.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryParser.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPolygonParser.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/LineStringParser.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/BaseParser.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryCollectionParser.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GenericGeometryParser.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PolygonParser.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoFunctionUtils.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoJsonConstants.java,
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometryDeserializer.java,
+       and
+         asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometrySerializer.java
+
+   are available under the following license:
+---
+   Classes are modified to support the org.locationtech.jts package instead of com.vividsolutions.jts
+   Copyright BeDataDriven
+
+   Apache License
+   Version 2.0, January 2004
+   http://www.apache.org/licenses/
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+   1. Definitions.
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+   2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+   3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+   4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+   (a) You must give any other recipients of the Work or
+   Derivative Works a copy of this License; and
+   (b) You must cause any modified files to carry prominent notices
+   stating that You changed the files; and
+   (c) You must retain, in the Source form of any Derivative Works
+   that You distribute, all copyright, patent, trademark, and
+   attribution notices from the Source form of the Work,
+   excluding those notices that do not pertain to any part of
+   the Derivative Works; and
+   (d) If the Work includes a "NOTICE" text file as part of its
+   distribution, then any Derivative Works that You distribute must
+   include a readable copy of the attribution notices contained
+   within such NOTICE file, excluding those notices that do not
+   pertain to any part of the Derivative Works, in at least one
+   of the following places: within a NOTICE text file distributed
+   as part of the Derivative Works; within the Source form or
+   documentation, if provided along with the Derivative Works; or,
+   within a display generated by the Derivative Works, if and
+   wherever such third-party notices normally appear. The contents
+   of the NOTICE file are for informational purposes only and
+   do not modify the License. You may add Your own attribution
+   notices within Derivative Works that You distribute, alongside
+   or as an addendum to the NOTICE text from the Work, provided
+   that such additional attribution notices cannot be construed
+   as modifying the License.
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+   6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+   7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+   8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+   9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+   END OF TERMS AND CONDITIONS
+   APPENDIX: How to apply the Apache License to your work.
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+   Copyright [yyyy] [name of copyright owner]
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+   http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+   For more details on the original project and licensing, please visit
+   https://github.com/bedatadriven/jackson-datatype-jts.
+---
    Portions of the AsterixDB API examples
        located at:
          asterix-examples/src/main/resources/admaql101-demo/bottle.py,
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/SqlppCompilationProvider.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/SqlppCompilationProvider.java
index 4469dbd..eef5884 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/SqlppCompilationProvider.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/SqlppCompilationProvider.java
@@ -90,16 +90,17 @@
                 CompilerProperties.COMPILER_SORT_SAMPLES_KEY, CompilerProperties.COMPILER_EXTERNALSCANMEMORY_KEY,
                 CompilerProperties.COMPILER_INDEXONLY_KEY, CompilerProperties.COMPILER_INTERNAL_SANITYCHECK_KEY,
                 CompilerProperties.COMPILER_EXTERNAL_FIELD_PUSHDOWN_KEY, CompilerProperties.COMPILER_SUBPLAN_MERGE_KEY,
-                CompilerProperties.COMPILER_SUBPLAN_NESTEDPUSHDOWN_KEY, CompilerProperties.COMPILER_ARRAYINDEX_KEY,
-                CompilerProperties.COMPILER_CBO_KEY, CompilerProperties.COMPILER_CBO_TEST_KEY,
-                CompilerProperties.COMPILER_FORCE_JOIN_ORDER_KEY, CompilerProperties.COMPILER_QUERY_PLAN_SHAPE_KEY,
-                CompilerProperties.COMPILER_MIN_MEMORY_ALLOCATION_KEY, CompilerProperties.COMPILER_COLUMN_FILTER_KEY,
-                CompilerProperties.COMPILER_BATCH_LOOKUP_KEY, FunctionUtil.IMPORT_PRIVATE_FUNCTIONS,
-                FuzzyUtils.SIM_FUNCTION_PROP_NAME, FuzzyUtils.SIM_THRESHOLD_PROP_NAME,
-                StartFeedStatement.WAIT_FOR_COMPLETION, FeedActivityDetails.FEED_POLICY_NAME,
-                FeedActivityDetails.COLLECT_LOCATIONS, SqlppQueryRewriter.INLINE_WITH_OPTION,
-                SqlppExpressionToPlanTranslator.REWRITE_IN_AS_OR_OPTION, "hash_merge", "output-record-type",
-                DisjunctivePredicateToJoinRule.REWRITE_OR_AS_JOIN_OPTION,
+                CompilerProperties.COMPILER_SUBPLAN_NESTEDPUSHDOWN_KEY, CompilerProperties.COMPILER_ORDERFIELDS_KEY,
+                CompilerProperties.COMPILER_ARRAYINDEX_KEY, CompilerProperties.COMPILER_CBO_KEY,
+                CompilerProperties.COMPILER_CBO_TEST_KEY, CompilerProperties.COMPILER_FORCE_JOIN_ORDER_KEY,
+                CompilerProperties.COMPILER_QUERY_PLAN_SHAPE_KEY, CompilerProperties.COMPILER_MIN_MEMORY_ALLOCATION_KEY,
+                CompilerProperties.COMPILER_COLUMN_FILTER_KEY, CompilerProperties.COMPILER_BATCH_LOOKUP_KEY,
+                FunctionUtil.IMPORT_PRIVATE_FUNCTIONS,
+                CompilerProperties.COMPILER_MAX_VARIABLE_OCCURRENCES_INLINING_KEY, FuzzyUtils.SIM_FUNCTION_PROP_NAME,
+                FuzzyUtils.SIM_THRESHOLD_PROP_NAME, StartFeedStatement.WAIT_FOR_COMPLETION,
+                FeedActivityDetails.FEED_POLICY_NAME, FeedActivityDetails.COLLECT_LOCATIONS,
+                SqlppQueryRewriter.INLINE_WITH_OPTION, SqlppExpressionToPlanTranslator.REWRITE_IN_AS_OR_OPTION,
+                "hash_merge", "output-record-type", DisjunctivePredicateToJoinRule.REWRITE_OR_AS_JOIN_OPTION,
                 SetAsterixPhysicalOperatorsRule.REWRITE_ATTEMPT_BATCH_ASSIGN,
                 EquivalenceClassUtils.REWRITE_INTERNAL_QUERYUID_PK, SqlppQueryRewriter.SQL_COMPAT_OPTION,
                 JoinEnum.CBO_FULL_ENUM_LEVEL_KEY, JoinEnum.CBO_CP_ENUM_KEY));
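
The parameter names gathered above form the whitelist of per-request compiler options. A sketch of how the newly whitelisted option could be toggled from SQL++, assuming COMPILER_ORDERFIELDS_KEY resolves to the string "compiler.orderfields" (the actual key string lives in CompilerProperties, outside this diff):

    -- hypothetical per-request override
    SET `compiler.orderfields` "true";
    SELECT VALUE c FROM Customers c;
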
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinCondition.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinCondition.java
index d56d38a..b5c290f 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinCondition.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinCondition.java
@@ -29,6 +29,7 @@
     protected boolean outerJoin;
     private boolean derived = false;
     protected boolean partOfComposite = false;
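+    // set when this condition is identified as a redundant join-graph edge during selectivity estimation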
+    protected boolean deleted = false;
     protected int numberOfVars = 0; // how many variables
     protected int componentNumber = 0; // for identifying if join graph is connected
     protected int datasetBits;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinNode.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinNode.java
index 2c430b1..f0ca98d 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinNode.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinNode.java
@@ -300,10 +300,10 @@
         parent.getInputs().get(0).setValue(deepCopyofScan);
         finalDatasetCard = origDatasetCard = idxDetails.getSourceCardinality();
         sampleCard = Math.min(idxDetails.getSampleCardinalityTarget(), origDatasetCard);
-        boolean unnest = joinEnum.findUnnestOp(selOp);
+        boolean unnest = joinEnum.findUnnestOp(leafInput);
         if (unnest) {
             ILogicalExpression saveExpr = selOp.getCondition().getValue();
-            double unnestSampleCard = joinEnum.stats.computeUnnestedOriginalCardinality(selOp);
+            double unnestSampleCard = joinEnum.stats.computeUnnestedOriginalCardinality(leafInput);
             selOp.getCondition().setValue(saveExpr); // restore the expression
             unnestFactor = unnestSampleCard / sampleCard;
             sampleCard = unnestSampleCard;
@@ -446,7 +446,10 @@
         if (this.applicableJoinConditions.size() >= 3) {
             redundantSel = removeRedundantPred(this.applicableJoinConditions);
         }
-
+        // reset all join conditions back to the non-deleted state
+        for (JoinCondition jc : joinConditions) {
+            jc.deleted = false;
+        }
         // By dividing by redundantSel, we are undoing the earlier multiplication of all the selectivities.
         return joinCard / redundantSel;
     }
@@ -456,7 +459,7 @@
         if (jc1.comparisonType == JoinCondition.comparisonOp.OP_EQ
                 && jc2.comparisonType == JoinCondition.comparisonOp.OP_EQ
                 && jc3.comparisonType == JoinCondition.comparisonOp.OP_EQ) {
-            sel = findRedundantSel(jc1.selectivity, jc2.selectivity, jc3.selectivity);
+            sel = findRedundantSel(jc1, jc2, jc3);
         } else {
             // at least one of the predicates is not an equality predicate
             // this can get messy here, as 1, 2, or all 3 can be non-equality
@@ -472,6 +475,35 @@
         return sel;
     }
 
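+    // Of the three selectivities in an equality-predicate triangle, the middle one is
+    // taken to belong to the redundant edge: it is returned so the caller can divide it
+    // back out of the join cardinality, and its condition is marked deleted so later
+    // triangle scans skip it.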
+    private static double findRedundantSel(JoinCondition jc1, JoinCondition jc2, JoinCondition jc3) {
+        // find middle selectivity
+        if (jc2.selectivity <= jc1.selectivity && jc1.selectivity <= jc3.selectivity) {
+            jc1.deleted = true;
+            return jc1.selectivity;
+        }
+        if (jc3.selectivity <= jc1.selectivity && jc1.selectivity <= jc2.selectivity) {
+            jc1.deleted = true;
+            return jc1.selectivity;
+        }
+        if (jc1.selectivity <= jc2.selectivity && jc2.selectivity <= jc3.selectivity) {
+            jc2.deleted = true;
+            return jc2.selectivity;
+        }
+        if (jc3.selectivity <= jc2.selectivity && jc2.selectivity <= jc1.selectivity) {
+            jc2.deleted = true;
+            return jc2.selectivity;
+        }
+        if (jc1.selectivity <= jc3.selectivity && jc3.selectivity <= jc2.selectivity) {
+            jc3.deleted = true;
+            return jc3.selectivity;
+        }
+        if (jc2.selectivity <= jc3.selectivity && jc3.selectivity <= jc1.selectivity) {
+            jc3.deleted = true;
+            return jc3.selectivity;
+        }
+        return 1.0; // unreachable for ordered selectivities; keeps the compiler happy
+    }
+
     // if a redundant edge is found, we need to eliminate one of the edges.
     // If two triangles share an edge, removing the common edge will suffice
     // Each edge has two vertices. So we can only handle predicate with exactly two tables such as R.a = S.a
@@ -485,21 +517,21 @@
         int[] verticesCopy = new int[6];
         for (int i = 0; i <= applicablePredicatesInCurrentJn.size() - 3; i++) {
             jc1 = joinConditions.get(applicablePredicatesInCurrentJn.get(i));
-            if (jc1.partOfComposite) {
+            if (jc1.partOfComposite || jc1.deleted) {
                 continue; // must ignore these or the same triangles will be found more than once.
             }
             vertices[0] = jc1.leftSideBits;
             vertices[1] = jc1.rightSideBits;
             for (int j = i + 1; j <= applicablePredicatesInCurrentJn.size() - 2; j++) {
                 jc2 = joinConditions.get(applicablePredicatesInCurrentJn.get(j));
-                if (jc2.partOfComposite) {
+                if (jc2.partOfComposite || jc2.deleted) {
                     continue;
                 }
                 vertices[2] = jc2.leftSideBits;
                 vertices[3] = jc2.rightSideBits;
                 for (int k = j + 1; k <= applicablePredicatesInCurrentJn.size() - 1; k++) {
                     jc3 = joinConditions.get(applicablePredicatesInCurrentJn.get(k));
-                    if (jc3.partOfComposite) {
+                    if (jc3.partOfComposite || jc3.deleted) {
                         continue;
                     }
                     vertices[4] = jc3.leftSideBits;
@@ -510,7 +542,9 @@
                     if (verticesCopy[0] == verticesCopy[1] && verticesCopy[2] == verticesCopy[3]
                             && verticesCopy[4] == verticesCopy[5]) {
                         // redundant edge found
-                        redundantSel *= adjustSelectivities(jc1, jc2, jc3);
+                        if (!(jc1.deleted || jc2.deleted || jc3.deleted)) {
+                            redundantSel *= adjustSelectivities(jc1, jc2, jc3);
+                        }
                     }
                 }
             }
@@ -518,16 +552,6 @@
         return redundantSel;
     }
 
-    private static double findRedundantSel(double sel1, double sel2, double sel3) {
-        double[] sels = new double[3];
-        sels[0] = sel1;
-        sels[1] = sel2;
-        sels[2] = sel3;
-
-        Arrays.sort(sels); // we are sorting to make this deterministic
-        return sels[1]; // the middle one is closest to one of the extremes
-    }
-
     protected int addSingleDatasetPlans() {
         List<PlanNode> allPlans = joinEnum.allPlans;
         ICost opCost;
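
A worked instance of the correction above, with assumed numbers: if a join triangle R.a = S.a, S.a = T.a, R.a = T.a carries selectivities 0.01, 0.05, and 0.1, all three were multiplied into joinCard even though only two of the edges are independent. findRedundantSel picks the middle value, 0.05, marks that condition deleted, and the caller divides joinCard by 0.05, undoing exactly the one redundant multiplication; resetting the deleted flags afterwards keeps the conditions reusable for other join nodes.
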
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/Stats.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/Stats.java
index aa2f40f..c6f274f 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/Stats.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/Stats.java
@@ -118,9 +118,7 @@
             // Since there is a left and right dataset here, expecting only two variables.
             return 1.0;
         }
-        if (!(joinExpr.getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.EQ))) {
-            return 0.5; // we will assume half; rest of the code assumes EQ joins
-        }
+
         int idx1, idx2;
         if (joinEnum.varLeafInputIds.containsKey(exprUsedVars.get(0))) {
             idx1 = joinEnum.varLeafInputIds.get(exprUsedVars.get(0));
@@ -168,6 +166,9 @@
         } else {
             ILogicalOperator leafInput;
             LogicalVariable var;
+            if (!(joinExpr.getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.EQ))) {
+                return 0.5; // we will assume half; rest of the code assumes EQ joins
+            }
             // choose the smaller side sample; better results this way for sure!
             if (card1 < card2) {
                 leafInput = joinEnum.leafInputs.get(idx1 - 1);
@@ -537,11 +538,11 @@
         return record.numberOfFields();
     }
 
-    public double computeUnnestedOriginalCardinality(SelectOperator selOp) throws AlgebricksException {
+    public double computeUnnestedOriginalCardinality(ILogicalOperator op) throws AlgebricksException {
         // Replace ALL SELECTS with TRUE, restore them after running the sampling query.
-        List<ILogicalExpression> selExprs = storeSelectConditionsAndMakeThemTrue(selOp, null);
-        List<List<IAObject>> result = runSamplingQuery(optCtx, selOp);
-        restoreAllSelectConditions(selOp, selExprs, null);
+        List<ILogicalExpression> selExprs = storeSelectConditionsAndMakeThemTrue(op, null);
+        List<List<IAObject>> result = runSamplingQuery(optCtx, op);
+        restoreAllSelectConditions(op, selExprs, null);
         return findPredicateCardinality(result, false);
     }
 
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
index 01b8527..a39bc06 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
@@ -33,6 +33,7 @@
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Index;
+import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.SourceLocation;
 
@@ -605,6 +606,8 @@
         private final List<Expression> keyExpressions;
         private final boolean autogenerated;
 
+        private final ARecordType itemType;
+
         public CompiledCopyToStatement(CopyToStatement copyToStatement) {
             this.query = copyToStatement.getQuery();
             this.sourceVariable = copyToStatement.getSourceVariable();
@@ -619,6 +622,7 @@
             this.orderByNullModifierList = copyToStatement.getOrderByNullModifierList();
             this.keyExpressions = copyToStatement.getKeyExpressions();
             this.autogenerated = copyToStatement.isAutogenerated();
+            this.itemType = eddDecl.getItemType();
         }
 
         @Override
@@ -642,6 +646,10 @@
             return properties;
         }
 
+        public ARecordType getItemType() {
+            return itemType;
+        }
+
         public List<Expression> getPathExpressions() {
             return pathExpressions;
         }
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index d5c916f..978997c 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -466,7 +466,8 @@
         }
 
         // Write adapter configuration
-        WriteDataSink writeDataSink = new WriteDataSink(copyTo.getAdapter(), copyTo.getProperties());
+        WriteDataSink writeDataSink = new WriteDataSink(copyTo.getAdapter(), copyTo.getProperties(),
+                copyTo.getItemType(), expr.getSourceLocation());
 
         // writeOperator
         WriteOperator writeOperator = new WriteOperator(sourceExprRef, new MutableObject<>(fullPathExpr),
diff --git a/asterixdb/asterix-app/data/hdfs/parquet/partition_heterogeneous.json b/asterixdb/asterix-app/data/hdfs/parquet/partition_heterogeneous.json
new file mode 100644
index 0000000..e6179d6
--- /dev/null
+++ b/asterixdb/asterix-app/data/hdfs/parquet/partition_heterogeneous.json
@@ -0,0 +1,100 @@
+{"id": 1, "partitioner_key": "A", "name": {"nickname": "VK"}, "randomField": 5678, "active": [true, false], "price": 100}
+{"id": 2, "partitioner_key": "A", "name": ["Kohli", "Dhoni"], "randomField": 18, "active": 1, "price": {"amount": 99.99, "currency": "USD"}}
+{"id": 3, "partitioner_key": "C", "name": false, "randomField": [1, 2, 3], "active": {"status": "active"}, "price": "one hundred"}
+{"id": 4, "partitioner_key": "C", "name": {"nickname": "VK"}, "randomField": 5678, "active": [true, false], "price": 100}
+{"id": 5, "partitioner_key": "A", "name": {"first": "Virat", "last": "Kohli"}, "randomField": "1234", "active": "yes", "price": "99.99"}
+{"id": 6, "partitioner_key": "C", "name": 3456, "randomField": {"value": 7890}, "active": true, "price": "101.50"}
+{"id": 7, "partitioner_key": "A", "name": ["Kohli", "Dhoni"], "randomField": 18, "active": 1, "price": {"amount": 99.99, "currency": "USD"}}
+{"id": 8, "partitioner_key": "B", "name": false, "randomField": [1, 2, 3], "active": {"status": "active"}, "price": "one hundred"}
+{"id": 9, "partitioner_key": "A", "name": ["Kohli", "Dhoni"], "randomField": 18, "active": 1, "price": {"amount": 99.99, "currency": "USD"}}
+{"id": 10, "partitioner_key": "B", "name": 3456, "randomField": {"value": 7890}, "active": true, "price": "101.50"}
+{"id": 11, "partitioner_key": "B", "name": 3456, "randomField": {"value": 7890}, "active": true, "price": "101.50"}
+{"id": 12, "partitioner_key": "B", "name": "None", "randomField": "5678", "active": false, "price": {"currency": "INR", "value": 99.99}}
+{"id": 13, "partitioner_key": "A", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 14, "partitioner_key": "A", "name": {"nickname": "VK"}, "randomField": 5678, "active": [true, false], "price": 100}
+{"id": 15, "partitioner_key": "C", "name": ["Kohli", "Dhoni"], "randomField": 18, "active": 1, "price": {"amount": 99.99, "currency": "USD"}}
+{"id": 16, "partitioner_key": "C", "name": 9876, "randomField": {"identifier": "ORD1234", "type": "order"}, "active": "active", "price": [99, 100, 101]}
+{"id": 17, "partitioner_key": "B", "name": 9876, "randomField": {"identifier": "ORD1234", "type": "order"}, "active": "active", "price": [99, 100, 101]}
+{"id": 18, "partitioner_key": "C", "name": {"nickname": "VK"}, "randomField": 5678, "active": [true, false], "price": 100}
+{"id": 19, "partitioner_key": "C", "name": 9876, "randomField": {"identifier": "ORD1234", "type": "order"}, "active": "active", "price": [99, 100, 101]}
+{"id": 20, "partitioner_key": "B", "name": {"first": "MS", "last": "Dhoni"}, "randomField": [9, 8, 7], "active": "no", "price": 101.5}
+{"id": 21, "partitioner_key": "B", "name": {"first": "Virat", "last": "Kohli"}, "randomField": "1234", "active": "yes", "price": "99.99"}
+{"id": 22, "partitioner_key": "A", "name": false, "randomField": [1, 2, 3], "active": {"status": "active"}, "price": "one hundred"}
+{"id": 23, "partitioner_key": "C", "name": ["Kohli", "Dhoni"], "randomField": 18, "active": 1, "price": {"amount": 99.99, "currency": "USD"}}
+{"id": 24, "partitioner_key": "B", "name": false, "randomField": [1, 2, 3], "active": {"status": "active"}, "price": "one hundred"}
+{"id": 25, "partitioner_key": "C", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 26, "partitioner_key": "A", "name": false, "randomField": [1, 2, 3], "active": {"status": "active"}, "price": "one hundred"}
+{"id": 27, "partitioner_key": "A", "name": "None", "randomField": "5678", "active": false, "price": {"currency": "INR", "value": 99.99}}
+{"id": 28, "partitioner_key": "B", "name": {"first": "MS", "last": "Dhoni"}, "randomField": [9, 8, 7], "active": "no", "price": 101.5}
+{"id": 29, "partitioner_key": "C", "name": {"first": "Virat", "last": "Kohli"}, "randomField": "1234", "active": "yes", "price": "99.99"}
+{"id": 30, "partitioner_key": "C", "name": 9876, "randomField": {"identifier": "ORD1234", "type": "order"}, "active": "active", "price": [99, 100, 101]}
+{"id": 31, "partitioner_key": "C", "name": "None", "randomField": "5678", "active": false, "price": {"currency": "INR", "value": 99.99}}
+{"id": 32, "partitioner_key": "B", "name": {"nickname": "VK"}, "randomField": 5678, "active": [true, false], "price": 100}
+{"id": 33, "partitioner_key": "C", "name": 3456, "randomField": {"value": 7890}, "active": true, "price": "101.50"}
+{"id": 34, "partitioner_key": "B", "name": 3456, "randomField": {"value": 7890}, "active": true, "price": "101.50"}
+{"id": 35, "partitioner_key": "B", "name": 9876, "randomField": {"identifier": "ORD1234", "type": "order"}, "active": "active", "price": [99, 100, 101]}
+{"id": 36, "partitioner_key": "B", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 37, "partitioner_key": "B", "name": 3456, "randomField": {"value": 7890}, "active": true, "price": "101.50"}
+{"id": 38, "partitioner_key": "A", "name": 9876, "randomField": {"identifier": "ORD1234", "type": "order"}, "active": "active", "price": [99, 100, 101]}
+{"id": 39, "partitioner_key": "B", "name": {"nickname": "VK"}, "randomField": 5678, "active": [true, false], "price": 100}
+{"id": 40, "partitioner_key": "C", "name": {"first": "Virat", "last": "Kohli"}, "randomField": "1234", "active": "yes", "price": "99.99"}
+{"id": 41, "partitioner_key": "A", "name": false, "randomField": [1, 2, 3], "active": {"status": "active"}, "price": "one hundred"}
+{"id": 42, "partitioner_key": "A", "name": "None", "randomField": "5678", "active": false, "price": {"currency": "INR", "value": 99.99}}
+{"id": 43, "partitioner_key": "C", "name": {"first": "MS", "last": "Dhoni"}, "randomField": [9, 8, 7], "active": "no", "price": 101.5}
+{"id": 44, "partitioner_key": "C", "name": ["Kohli", "Dhoni"], "randomField": 18, "active": 1, "price": {"amount": 99.99, "currency": "USD"}}
+{"id": 45, "partitioner_key": "A", "name": "Virat Kohli", "randomField": 1234, "active": true, "price": 99.99}
+{"id": 46, "partitioner_key": "B", "name": false, "randomField": [1, 2, 3], "active": {"status": "active"}, "price": "one hundred"}
+{"id": 47, "partitioner_key": "C", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 48, "partitioner_key": "A", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 49, "partitioner_key": "A", "name": {"first": "MS", "last": "Dhoni"}, "randomField": [9, 8, 7], "active": "no", "price": 101.5}
+{"id": 50, "partitioner_key": "C", "name": {"first": "MS", "last": "Dhoni"}, "randomField": [9, 8, 7], "active": "no", "price": 101.5}
+{"id": 51, "partitioner_key": "B", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 52, "partitioner_key": "A", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 53, "partitioner_key": "C", "name": 3456, "randomField": {"value": 7890}, "active": true, "price": "101.50"}
+{"id": 54, "partitioner_key": "B", "name": {"first": "MS", "last": "Dhoni"}, "randomField": [9, 8, 7], "active": "no", "price": 101.5}
+{"id": 55, "partitioner_key": "A", "name": "None", "randomField": "5678", "active": false, "price": {"currency": "INR", "value": 99.99}}
+{"id": 56, "partitioner_key": "B", "name": "Virat Kohli", "randomField": 1234, "active": true, "price": 99.99}
+{"id": 57, "partitioner_key": "B", "name": false, "randomField": [1, 2, 3], "active": {"status": "active"}, "price": "one hundred"}
+{"id": 58, "partitioner_key": "B", "name": 3456, "randomField": {"value": 7890}, "active": true, "price": "101.50"}
+{"id": 59, "partitioner_key": "A", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 60, "partitioner_key": "C", "name": ["Kohli", "Dhoni"], "randomField": 18, "active": 1, "price": {"amount": 99.99, "currency": "USD"}}
+{"id": 61, "partitioner_key": "C", "name": 9876, "randomField": {"identifier": "ORD1234", "type": "order"}, "active": "active", "price": [99, 100, 101]}
+{"id": 62, "partitioner_key": "A", "name": {"first": "MS", "last": "Dhoni"}, "randomField": [9, 8, 7], "active": "no", "price": 101.5}
+{"id": 63, "partitioner_key": "A", "name": false, "randomField": [1, 2, 3], "active": {"status": "active"}, "price": "one hundred"}
+{"id": 64, "partitioner_key": "C", "name": ["Kohli", "Dhoni"], "randomField": 18, "active": 1, "price": {"amount": 99.99, "currency": "USD"}}
+{"id": 65, "partitioner_key": "A", "name": {"nickname": "VK"}, "randomField": 5678, "active": [true, false], "price": 100}
+{"id": 66, "partitioner_key": "B", "name": {"first": "MS", "last": "Dhoni"}, "randomField": [9, 8, 7], "active": "no", "price": 101.5}
+{"id": 67, "partitioner_key": "B", "name": "None", "randomField": "5678", "active": false, "price": {"currency": "INR", "value": 99.99}}
+{"id": 68, "partitioner_key": "C", "name": 3456, "randomField": {"value": 7890}, "active": true, "price": "101.50"}
+{"id": 69, "partitioner_key": "C", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 70, "partitioner_key": "A", "name": ["Kohli", "Dhoni"], "randomField": 18, "active": 1, "price": {"amount": 99.99, "currency": "USD"}}
+{"id": 71, "partitioner_key": "A", "name": ["Kohli", "Dhoni"], "randomField": 18, "active": 1, "price": {"amount": 99.99, "currency": "USD"}}
+{"id": 72, "partitioner_key": "B", "name": ["Kohli", "Dhoni"], "randomField": 18, "active": 1, "price": {"amount": 99.99, "currency": "USD"}}
+{"id": 73, "partitioner_key": "C", "name": false, "randomField": [1, 2, 3], "active": {"status": "active"}, "price": "one hundred"}
+{"id": 74, "partitioner_key": "C", "name": "Virat Kohli", "randomField": 1234, "active": true, "price": 99.99}
+{"id": 75, "partitioner_key": "A", "name": 3456, "randomField": {"value": 7890}, "active": true, "price": "101.50"}
+{"id": 76, "partitioner_key": "B", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 77, "partitioner_key": "A", "name": false, "randomField": [1, 2, 3], "active": {"status": "active"}, "price": "one hundred"}
+{"id": 78, "partitioner_key": "A", "name": {"first": "MS", "last": "Dhoni"}, "randomField": [9, 8, 7], "active": "no", "price": 101.5}
+{"id": 79, "partitioner_key": "C", "name": {"first": "Virat", "last": "Kohli"}, "randomField": "1234", "active": "yes", "price": "99.99"}
+{"id": 80, "partitioner_key": "A", "name": {"first": "Virat", "last": "Kohli"}, "randomField": "1234", "active": "yes", "price": "99.99"}
+{"id": 81, "partitioner_key": "C", "name": 9876, "randomField": {"identifier": "ORD1234", "type": "order"}, "active": "active", "price": [99, 100, 101]}
+{"id": 82, "partitioner_key": "A", "name": {"first": "MS", "last": "Dhoni"}, "randomField": [9, 8, 7], "active": "no", "price": 101.5}
+{"id": 83, "partitioner_key": "A", "name": "None", "randomField": "5678", "active": false, "price": {"currency": "INR", "value": 99.99}}
+{"id": 84, "partitioner_key": "A", "name": {"first": "MS", "last": "Dhoni"}, "randomField": [9, 8, 7], "active": "no", "price": 101.5}
+{"id": 85, "partitioner_key": "C", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 86, "partitioner_key": "C", "name": {"first": "Virat", "last": "Kohli"}, "randomField": "1234", "active": "yes", "price": "99.99"}
+{"id": 87, "partitioner_key": "C", "name": ["Kohli", "Dhoni"], "randomField": 18, "active": 1, "price": {"amount": 99.99, "currency": "USD"}}
+{"id": 88, "partitioner_key": "A", "name": {"nickname": "VK"}, "randomField": 5678, "active": [true, false], "price": 100}
+{"id": 89, "partitioner_key": "C", "name": {"first": "Virat", "last": "Kohli"}, "randomField": "1234", "active": "yes", "price": "99.99"}
+{"id": 90, "partitioner_key": "C", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 91, "partitioner_key": "A", "name": "Virat Kohli", "randomField": 1234, "active": true, "price": 99.99}
+{"id": 92, "partitioner_key": "A", "name": 9876, "randomField": {"identifier": "ORD1234", "type": "order"}, "active": "active", "price": [99, 100, 101]}
+{"id": 93, "partitioner_key": "B", "name": 3456, "randomField": {"value": 7890}, "active": true, "price": "101.50"}
+{"id": 94, "partitioner_key": "A", "name": {"first": "MS", "last": "Dhoni"}, "randomField": [9, 8, 7], "active": "no", "price": 101.5}
+{"id": 95, "partitioner_key": "B", "name": ["Dhoni", "Kohli"], "randomField": "None", "active": 0, "price": 50}
+{"id": 96, "partitioner_key": "B", "name": "Virat Kohli", "randomField": 1234, "active": true, "price": 99.99}
+{"id": 97, "partitioner_key": "A", "name": 9876, "randomField": {"identifier": "ORD1234", "type": "order"}, "active": "active", "price": [99, 100, 101]}
+{"id": 98, "partitioner_key": "B", "name": {"first": "Virat", "last": "Kohli"}, "randomField": "1234", "active": "yes", "price": "99.99"}
+{"id": 99, "partitioner_key": "A", "name": 9876, "randomField": {"identifier": "ORD1234", "type": "order"}, "active": "active", "price": [99, 100, 101]}
+{"id": 100, "partitioner_key": "C", "name": {"first": "Virat", "last": "Kohli"}, "randomField": "1234", "active": "yes", "price": "99.99"}
diff --git a/asterixdb/asterix-app/pom.xml b/asterixdb/asterix-app/pom.xml
index 63bb7da..5b2f8a4 100644
--- a/asterixdb/asterix-app/pom.xml
+++ b/asterixdb/asterix-app/pom.xml
@@ -513,7 +513,7 @@
     <profile>
       <id>asterix-gerrit-asterix-app</id>
       <properties>
-        <test.excludes>**/CloudStorageTest.java,**/CloudStorageGCSTest.java,**/SqlppExecutionWithCancellationTest.java,**/DmlTest.java,**/RepeatedTest.java,**/SqlppExecutionTest.java,**/SqlppExecutionColumnTest.java,**/*StaticPartitioning*Test.java,**/*Ssl*Test.java,**/Podman*.java,**/*AnalyzedExecutionTest.java,**/SqlppProfiledExecutionTest.java,**/CloudPythonTest.java</test.excludes>
+        <test.excludes>**/CloudStorageTest.java,**/CloudStorageGCSTest.java,**/SqlppExecutionWithCancellationTest.java,**/DmlTest.java,**/RepeatedTest.java,**/SqlppExecutionTest.java,**/SqlppExecutionColumnTest.java,**/*StaticPartitioning*Test.java,**/*Ssl*Test.java,**/Podman*.java,**/*AnalyzedExecutionTest.java,**/SqlppProfiledExecutionTest.java,**/CloudPythonTest.java,**/CloudStorageAzTest.java</test.excludes>
         <itest.excludes>**/*.java</itest.excludes>
       </properties>
       <build>
@@ -590,6 +590,43 @@
         <failIfNoTests>false</failIfNoTests>
       </properties>
     </profile>
+    <profile>
+      <id>azurite-tests</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>com.github.eirslett</groupId>
+            <artifactId>frontend-maven-plugin</artifactId>
+            <version>1.13.4</version>
+            <configuration>
+              <nodeVersion>v14.15.4</nodeVersion>
+              <npmVersion>6.14.11</npmVersion>
+              <workingDirectory>target/npm</workingDirectory>
+              <installDirectory>target/npm</installDirectory>
+            </configuration>
+            <executions>
+              <execution>
+                <id>install node and npm</id>
+                <goals>
+                  <goal>install-node-and-npm</goal>
+                </goals>
+                <phase>${azurite.npm.install.stage}</phase>
+              </execution>
+              <execution>
+                <id>azurite blob</id>
+                <phase>${azurite.install.stage}</phase>
+                <goals>
+                  <goal>npm</goal>
+                </goals>
+                <configuration>
+                  <arguments>install azurite</arguments>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
   </profiles>
   <dependencies>
     <dependency>
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
index 47498ea..db7c21a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
@@ -234,9 +234,14 @@
 
         ICcApplicationContext ccAppContext = metadataProvider.getApplicationContext();
         CompilerProperties compilerProperties = ccAppContext.getCompilerProperties();
-        Map<String, Object> querySpecificConfig = validateConfig(metadataProvider.getConfig(), sourceLoc);
+        Map<String, Object> config = metadataProvider.getConfig();
+        Map<String, Object> querySpecificConfig = validateConfig(config, sourceLoc);
         final PhysicalOptimizationConfig physOptConf = OptimizationConfUtil.createPhysicalOptimizationConf(
                 compilerProperties, querySpecificConfig, configurableParameterNames, sourceLoc);
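+        // make the effective order-fields setting explicit for downstream consumers when the request did not supply one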
+        if (!config.containsKey(CompilerProperties.COMPILER_ORDERFIELDS_KEY)) {
+            config.put(CompilerProperties.COMPILER_ORDERFIELDS_KEY, Boolean.toString(physOptConf.isOrderField()));
+        }
+
         boolean cboMode = physOptConf.getCBOMode() || physOptConf.getCBOTestMode();
         HeuristicCompilerFactoryBuilder builder =
                 new HeuristicCompilerFactoryBuilder(OptimizationContextFactory.INSTANCE);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FunctionRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FunctionRewriter.java
index 45776a4..470ec34 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FunctionRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FunctionRewriter.java
@@ -85,6 +85,9 @@
     }
 
     protected boolean invalidArgs(List<Mutable<ILogicalExpression>> args) {
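+        // variadic functions accept any number of arguments, so there is no arity to validate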
+        if (functionId.getArity() == FunctionIdentifier.VARARGS) {
+            return false;
+        }
         return args.size() != functionId.getArity();
     }
 
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/collectionsize/StorageSizeDatasource.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/collectionsize/StorageSizeDatasource.java
new file mode 100644
index 0000000..03d7bed
--- /dev/null
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/collectionsize/StorageSizeDatasource.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.app.function.collectionsize;
+
+import java.util.Objects;
+
+import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.asterix.metadata.api.IDatasourceFunction;
+import org.apache.asterix.metadata.declared.DataSourceId;
+import org.apache.asterix.metadata.declared.FunctionDataSource;
+import org.apache.asterix.metadata.declared.MetadataProvider;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.properties.INodeDomain;
+
+public class StorageSizeDatasource extends FunctionDataSource {
+
+    private static final DataSourceId STORAGE_SIZE_DATASOURCE_ID =
+            new DataSourceId(StorageSizeRewriter.STORAGE_SIZE.getDatabase(),
+                    FunctionSignature.getDataverseName(StorageSizeRewriter.STORAGE_SIZE),
+                    StorageSizeRewriter.STORAGE_SIZE.getName());
+    private final String database;
+    private final DataverseName dataverse;
+    private final String collection;
+    private final String index;
+
+    StorageSizeDatasource(INodeDomain domain, String database, DataverseName dataverse, String collection, String index)
+            throws AlgebricksException {
+        super(STORAGE_SIZE_DATASOURCE_ID, StorageSizeRewriter.STORAGE_SIZE, domain);
+        this.database = database;
+        this.dataverse = dataverse;
+        this.collection = collection;
+        this.index = index;
+    }
+
+    public String getDatabase() {
+        return database;
+    }
+
+    public DataverseName getDataverse() {
+        return dataverse;
+    }
+
+    public String getCollection() {
+        return collection;
+    }
+
+    public String getIndex() {
+        return index;
+    }
+
+    @Override
+    protected IDatasourceFunction createFunction(MetadataProvider metadataProvider,
+            AlgebricksAbsolutePartitionConstraint locations) {
+        return new StorageSizeFunction(AlgebricksAbsolutePartitionConstraint.randomLocation(locations.getLocations()),
+                database, dataverse, collection, index);
+    }
+
+    @Override
+    protected boolean sameFunctionDatasource(FunctionDataSource other) {
+        if (!Objects.equals(this.functionId, other.getFunctionId())) {
+            return false;
+        }
+        StorageSizeDatasource that = (StorageSizeDatasource) other;
+        return Objects.equals(this.database, that.getDatabase()) && Objects.equals(this.dataverse, that.getDataverse())
+                && Objects.equals(this.collection, that.getCollection()) && Objects.equals(this.index, that.getIndex());
+    }
+}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/collectionsize/StorageSizeFunction.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/collectionsize/StorageSizeFunction.java
new file mode 100644
index 0000000..9f165bd
--- /dev/null
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/collectionsize/StorageSizeFunction.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.app.function.collectionsize;
+
+import static org.apache.asterix.app.message.ExecuteStatementRequestMessage.DEFAULT_NC_TIMEOUT_MILLIS;
+
+import java.util.concurrent.TimeUnit;
+
+import org.apache.asterix.app.message.CalculateStorageSizeRequestMessage;
+import org.apache.asterix.app.message.CalculateStorageSizeResponseMessage;
+import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.MessageFuture;
+import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.metadata.declared.AbstractDatasourceFunction;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.INCServiceContext;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+public class StorageSizeFunction extends AbstractDatasourceFunction {
+
+    private static final Logger LOGGER = LogManager.getLogger();
+    private static final long serialVersionUID = 1L;
+
+    private final String database;
+    private final DataverseName dataverse;
+    private final String collection;
+    private final String index;
+
+    StorageSizeFunction(AlgebricksAbsolutePartitionConstraint locations, String database, DataverseName dataverse,
+            String collection, String index) {
+        super(locations);
+        this.database = database;
+        this.dataverse = dataverse;
+        this.collection = collection;
+        this.index = index;
+    }
+
+    @Override
+    public IRecordReader<char[]> createRecordReader(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
+        INCServiceContext serviceCtx = ctx.getJobletContext().getServiceContext();
+        INCMessageBroker messageBroker = (INCMessageBroker) serviceCtx.getMessageBroker();
+        MessageFuture messageFuture = messageBroker.registerMessageFuture();
+        long futureId = messageFuture.getFutureId();
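+        // ask the primary CC to compute the storage size and wait (bounded by the NC timeout) for its reply on this future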
+        CalculateStorageSizeRequestMessage request = new CalculateStorageSizeRequestMessage(serviceCtx.getNodeId(),
+                futureId, database, dataverse, collection, index);
+        try {
+            messageBroker.sendMessageToPrimaryCC(request);
+            CalculateStorageSizeResponseMessage response = (CalculateStorageSizeResponseMessage) messageFuture
+                    .get(DEFAULT_NC_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
+            if (response.getFailure() != null) {
+                throw HyracksDataException.create(response.getFailure());
+            }
+            return new StorageSizeReader(response.getSize());
+        } catch (Exception e) {
+            LOGGER.info("Could not calculate collection size", e);
+            throw HyracksDataException.create(e);
+        } finally {
+            messageBroker.deregisterMessageFuture(futureId);
+        }
+    }
+}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/collectionsize/StorageSizeReader.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/collectionsize/StorageSizeReader.java
new file mode 100644
index 0000000..733d39c
--- /dev/null
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/collectionsize/StorageSizeReader.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.app.function.collectionsize;
+
+import java.io.IOException;
+
+import org.apache.asterix.app.function.FunctionReader;
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.input.record.CharArrayRecord;
+
+public class StorageSizeReader extends FunctionReader {
+
+    private final long size;
+    private final CharArrayRecord record;
+    private boolean hasNext = true;
+
+    StorageSizeReader(long size) {
+        this.size = size;
+        record = new CharArrayRecord();
+    }
+
+    @Override
+    public boolean hasNext() throws IOException {
+        return hasNext;
+    }
+
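+    // Produces exactly one record: a JSON object of the form {"size": <size>}.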
+    @Override
+    public IRawRecord<char[]> next() throws IOException {
+        hasNext = false;
+        record.reset();
+        String result = "{\"size\":" + size + "}";
+        record.append(result.toCharArray());
+        record.endRecord();
+        return record;
+    }
+}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/collectionsize/StorageSizeRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/collectionsize/StorageSizeRewriter.java
new file mode 100644
index 0000000..d551b7d
--- /dev/null
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/collectionsize/StorageSizeRewriter.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.app.function.collectionsize;
+
+import static org.apache.asterix.common.exceptions.ErrorCode.TYPE_MISMATCH_FUNCTION;
+
+import java.util.List;
+
+import org.apache.asterix.app.function.FunctionRewriter;
+import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.functions.FunctionConstants;
+import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.asterix.metadata.declared.FunctionDataSource;
+import org.apache.asterix.om.constants.AsterixConstantValue;
+import org.apache.asterix.om.exceptions.ExceptionUtil;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.utils.ConstantExpressionUtil;
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+/**
+ * This function takes a collection's fully qualified name (database.scope.collection),
+ * plus an optional index name, and returns the storage size of that collection (or index).
+ */
+public class StorageSizeRewriter extends FunctionRewriter {
+
+    public static final FunctionIdentifier STORAGE_SIZE =
+            FunctionConstants.newAsterix("storage-size", FunctionIdentifier.VARARGS);
+    public static final StorageSizeRewriter INSTANCE = new StorageSizeRewriter(STORAGE_SIZE);
+
+    private StorageSizeRewriter(FunctionIdentifier functionId) {
+        super(functionId);
+    }
+
+    @Override
+    protected FunctionDataSource toDatasource(IOptimizationContext context, AbstractFunctionCallExpression function)
+            throws AlgebricksException {
+
+        if (function.getArguments().size() < 3 || function.getArguments().size() > 4) {
+            throw new CompilationException(ErrorCode.COMPILATION_INVALID_NUM_OF_ARGS, STORAGE_SIZE.getName());
+        }
+
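+        // Arguments: database, scope (dataverse), collection, and an optional index name.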
+        verifyArgs(function.getArguments());
+        ILogicalExpression databaseExpr = function.getArguments().get(0).getValue();
+        ILogicalExpression scopeExpr = function.getArguments().get(1).getValue();
+        ILogicalExpression collectionExpr = function.getArguments().get(2).getValue();
+        ILogicalExpression indexExpr = null;
+        if (function.getArguments().size() == 4) {
+            indexExpr = function.getArguments().get(3).getValue();
+        }
+
+        String database = ConstantExpressionUtil.getStringConstant(databaseExpr);
+        DataverseName dataverse =
+                DataverseName.createSinglePartName(ConstantExpressionUtil.getStringConstant(scopeExpr));
+        String collection = ConstantExpressionUtil.getStringConstant(collectionExpr);
+        String index = indexExpr != null ? ConstantExpressionUtil.getStringConstant(indexExpr) : null;
+
+        return new StorageSizeDatasource(context.getComputationNodeDomain(), database, dataverse, collection, index);
+    }
+
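+    // All arguments must be constant string expressions; any other type is a type mismatch.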
+    private void verifyArgs(List<Mutable<ILogicalExpression>> args) throws CompilationException {
+        for (int i = 0; i < args.size(); i++) {
+            ConstantExpression expr = (ConstantExpression) args.get(i).getValue();
+            AsterixConstantValue value = (AsterixConstantValue) expr.getValue();
+            ATypeTag type = value.getObject().getType().getTypeTag();
+            if (type != ATypeTag.STRING) {
+                throw new CompilationException(TYPE_MISMATCH_FUNCTION, STORAGE_SIZE.getName(),
+                        ExceptionUtil.indexToPosition(i), ATypeTag.STRING, type);
+            }
+        }
+    }
+}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/CalculateStorageSizeRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/CalculateStorageSizeRequestMessage.java
new file mode 100644
index 0000000..1163e5c
--- /dev/null
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/CalculateStorageSizeRequestMessage.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.message;
+
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
+import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.asterix.messaging.CCMessageBroker;
+import org.apache.asterix.utils.StorageUtil;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+public class CalculateStorageSizeRequestMessage implements ICcAddressedMessage {
+
+    private static final Logger LOGGER = LogManager.getLogger();
+    private static final long serialVersionUID = 1L;
+    private static final int FAILED_CALCULATED_SIZE = -1;
+    private final String nodeId;
+    private final long reqId;
+    private final String database;
+    private final DataverseName dataverse;
+    private final String collection;
+    private final String index;
+
+    public CalculateStorageSizeRequestMessage(String nodeId, long reqId, String database, DataverseName dataverse,
+            String collection, String index) {
+        this.nodeId = nodeId;
+        this.reqId = reqId;
+        this.database = database;
+        this.dataverse = dataverse;
+        this.collection = collection;
+        this.index = index;
+    }
+
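+    /**
+     * Handled on the CC: computes the requested storage size and sends it back to the
+     * originating NC, or sends a failure response (size -1) if the calculation fails.
+     */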
+    @Override
+    public void handle(ICcApplicationContext appCtx) throws HyracksDataException {
+        CCMessageBroker messageBroker = (CCMessageBroker) appCtx.getServiceContext().getMessageBroker();
+
+        try {
+            long size = StorageUtil.getCollectionSize(appCtx, database, dataverse, collection, index);
+            CalculateStorageSizeResponseMessage response =
+                    new CalculateStorageSizeResponseMessage(this.reqId, size, null);
+            messageBroker.sendApplicationMessageToNC(response, nodeId);
+        } catch (Exception ex) {
+            LOGGER.info("Failed to process request", ex);
+            try {
+                CalculateStorageSizeResponseMessage response =
+                        new CalculateStorageSizeResponseMessage(this.reqId, FAILED_CALCULATED_SIZE, ex);
+                messageBroker.sendApplicationMessageToNC(response, nodeId);
+            } catch (Exception ex2) {
+                LOGGER.info("Failed to process request", ex2);
+                throw HyracksDataException.create(ex2);
+            }
+        }
+    }
+}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/CalculateStorageSizeResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/CalculateStorageSizeResponseMessage.java
new file mode 100644
index 0000000..aa7989e
--- /dev/null
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/CalculateStorageSizeResponseMessage.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.message;
+
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
+import org.apache.asterix.common.messaging.api.MessageFuture;
+import org.apache.asterix.messaging.NCMessageBroker;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class CalculateStorageSizeResponseMessage implements INcAddressedMessage {
+
+    private static final long serialVersionUID = 1L;
+    private final long reqId;
+    private final long size;
+    private final Throwable failure;
+
+    public CalculateStorageSizeResponseMessage(long reqId, long size, Throwable failure) {
+        this.reqId = reqId;
+        this.size = size;
+        this.failure = failure;
+    }
+
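+    // Completes the message future registered by the requesting NC for this reqId.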
+    @Override
+    public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        NCMessageBroker mb = (NCMessageBroker) appCtx.getServiceContext().getMessageBroker();
+        MessageFuture future = mb.deregisterMessageFuture(reqId);
+        if (future != null) {
+            future.complete(this);
+        }
+    }
+
+    public long getSize() {
+        return size;
+    }
+
+    public Throwable getFailure() {
+        return failure;
+    }
+}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/StorageSizeRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/StorageSizeRequestMessage.java
new file mode 100644
index 0000000..82b425b
--- /dev/null
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/StorageSizeRequestMessage.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.message;
+
+import java.util.function.Predicate;
+
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.common.messaging.CcIdentifiedMessage;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
+import org.apache.asterix.common.storage.ResourceReference;
+import org.apache.asterix.messaging.NCMessageBroker;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.io.IIOManager;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+public class StorageSizeRequestMessage extends CcIdentifiedMessage implements INcAddressedMessage {
+
+    private static final Logger LOGGER = LogManager.getLogger();
+    private static final long serialVersionUID = 1L;
+    private final long reqId;
+    private final String database;
+    private final String dataverse;
+    private final String collection;
+    private final String index;
+
+    public StorageSizeRequestMessage(long reqId, String database, String dataverse, String collection, String index) {
+        this.reqId = reqId;
+        this.database = database;
+        this.dataverse = dataverse;
+        this.collection = collection;
+        this.index = index;
+    }
+
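+    /**
+     * Handled on each NC: sums the on-disk size of the storage files matching the requested
+     * entity and reports the partial size (or the failure) back to the primary CC.
+     */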
+    @Override
+    public void handle(INcApplicationContext appCtx) throws HyracksDataException {
+        try {
+            Predicate<String> predicate = getPredicate();
+            IIOManager ioManager = appCtx.getPersistenceIoManager();
+            StorageSizeResponseMessage response =
+                    new StorageSizeResponseMessage(reqId, ioManager.getSize(predicate), null);
+            respond(appCtx, response);
+        } catch (Exception e) {
+            LOGGER.info("failed to get collection size", e);
+            StorageSizeResponseMessage response = new StorageSizeResponseMessage(reqId, 0, e);
+            respond(appCtx, response);
+        }
+    }
+
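+    /**
+     * Matches storage paths belonging to the target database/dataverse/collection;
+     * when an index name is provided, only that index's files are matched.
+     */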
+    private Predicate<String> getPredicate() {
+        return path -> {
+            ResourceReference resourceReference = ResourceReference.of(path);
+            if (resourceReference.getDatabase().equals(database)
+                    && resourceReference.getDataverse().getCanonicalForm().equals(dataverse)
+                    && resourceReference.getDataset().equals(collection)) {
+                if (index != null) {
+                    return resourceReference.getIndex().equals(index);
+                }
+                return true;
+            }
+            return false;
+        };
+    }
+
+    private void respond(INcApplicationContext appCtx, StorageSizeResponseMessage response)
+            throws HyracksDataException {
+        NCMessageBroker messageBroker = (NCMessageBroker) appCtx.getServiceContext().getMessageBroker();
+        try {
+            messageBroker.sendMessageToPrimaryCC(response);
+        } catch (Exception e) {
+            LOGGER.info("failed to send collection size to cc", e);
+            throw HyracksDataException.create(e);
+        }
+    }
+
+    @Override
+    public boolean isWhispered() {
+        return true;
+    }
+}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/StorageSizeResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/StorageSizeResponseMessage.java
new file mode 100644
index 0000000..09850f1
--- /dev/null
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/StorageSizeResponseMessage.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.message;
+
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICCMessageBroker;
+import org.apache.asterix.common.messaging.api.ICCMessageBroker.ResponseState;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
+import org.apache.asterix.common.messaging.api.INcResponse;
+import org.apache.commons.lang3.tuple.MutablePair;
+
+public class StorageSizeResponseMessage implements ICcAddressedMessage, INcResponse {
+
+    private static final long serialVersionUID = 1L;
+    private final long reqId;
+    private final long size;
+    private final Throwable failure;
+
+    public StorageSizeResponseMessage(long reqId, long size, Throwable failure) {
+        this.reqId = reqId;
+        this.size = size;
+        this.failure = failure;
+    }
+
+    @Override
+    public void handle(ICcApplicationContext appCtx) {
+        ICCMessageBroker broker = (ICCMessageBroker) appCtx.getServiceContext().getMessageBroker();
+        broker.respond(reqId, this);
+    }
+
+    @Override
+    public void setResult(MutablePair<ResponseState, Object> result) {
+        if (failure != null) {
+            result.setLeft(ResponseState.FAILURE);
+            result.setRight(failure);
+            return;
+        }
+        setResponse(result);
+    }
+
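+    // Aggregates per-NC sizes: the first response initializes the total, later ones add to it.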
+    private void setResponse(MutablePair<ResponseState, Object> result) {
+        switch (result.getKey()) {
+            case SUCCESS:
+                long currentSize = (long) result.getValue();
+                result.setValue(currentSize + size);
+                break;
+            case UNINITIALIZED:
+                result.setLeft(ResponseState.SUCCESS);
+                result.setValue(size);
+                break;
+            default:
+                break;
+        }
+    }
+
+    @Override
+    public boolean isWhispered() {
+        return true;
+    }
+}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 0c736a2..17cf78a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -27,6 +27,7 @@
 import java.io.InputStream;
 import java.rmi.RemoteException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Date;
@@ -109,6 +110,7 @@
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.ExternalDataUtils;
 import org.apache.asterix.external.util.WriterValidationUtil;
+import org.apache.asterix.external.writer.printer.parquet.SchemaConverterVisitor;
 import org.apache.asterix.lang.common.base.IQueryRewriter;
 import org.apache.asterix.lang.common.base.IReturningStatement;
 import org.apache.asterix.lang.common.base.IRewriterFactory;
@@ -189,6 +191,7 @@
 import org.apache.asterix.metadata.entities.DatasourceAdapter;
 import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Dataverse;
+import org.apache.asterix.metadata.entities.EntityDetails;
 import org.apache.asterix.metadata.entities.ExternalDatasetDetails;
 import org.apache.asterix.metadata.entities.Feed;
 import org.apache.asterix.metadata.entities.FeedConnection;
@@ -204,6 +207,7 @@
 import org.apache.asterix.metadata.entities.ViewDetails;
 import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
 import org.apache.asterix.metadata.functions.ExternalFunctionCompilerUtil;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.metadata.utils.DatasetUtil;
 import org.apache.asterix.metadata.utils.IndexUtil;
 import org.apache.asterix.metadata.utils.KeyFieldTypeUtil;
@@ -213,6 +217,7 @@
 import org.apache.asterix.om.base.IAObject;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.AUnionType;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.BuiltinTypeMap;
 import org.apache.asterix.om.types.IAType;
@@ -383,16 +388,20 @@
                         activeNamespace = handleUseDataverseStatement(metadataProvider, stmt);
                         break;
                     case CREATE_DATABASE:
-                        handleCreateDatabaseStatement(metadataProvider, stmt, requestParameters);
+                        handleCreateDatabaseStatement(metadataProvider, stmt, requestParameters,
+                                Creator.DEFAULT_CREATOR);
                         break;
                     case CREATE_DATAVERSE:
-                        handleCreateDataverseStatement(metadataProvider, stmt, requestParameters);
+                        handleCreateDataverseStatement(metadataProvider, stmt, requestParameters,
+                                Creator.DEFAULT_CREATOR);
                         break;
                     case DATASET_DECL:
-                        handleCreateDatasetStatement(metadataProvider, stmt, hcc, requestParameters);
+                        handleCreateDatasetStatement(metadataProvider, stmt, hcc, requestParameters,
+                                Creator.DEFAULT_CREATOR);
                         break;
                     case CREATE_INDEX:
-                        handleCreateIndexStatement(metadataProvider, stmt, hcc, requestParameters);
+                        handleCreateIndexStatement(metadataProvider, stmt, hcc, requestParameters,
+                                Creator.DEFAULT_CREATOR);
                         break;
                     case CREATE_FULL_TEXT_FILTER:
                         handleCreateFullTextFilterStatement(metadataProvider, stmt);
@@ -437,7 +446,8 @@
                         handleAdapterDropStatement(metadataProvider, stmt);
                         break;
                     case CREATE_FUNCTION:
-                        handleCreateFunctionStatement(metadataProvider, stmt, stmtRewriter, requestParameters);
+                        handleCreateFunctionStatement(metadataProvider, stmt, stmtRewriter, requestParameters,
+                                Creator.DEFAULT_CREATOR);
                         break;
                     case FUNCTION_DROP:
                         handleFunctionDropStatement(metadataProvider, stmt, requestParameters);
@@ -449,13 +459,15 @@
                         handleLibraryDropStatement(metadataProvider, stmt, hcc, requestParameters);
                         break;
                     case CREATE_SYNONYM:
-                        handleCreateSynonymStatement(metadataProvider, stmt, requestParameters);
+                        handleCreateSynonymStatement(metadataProvider, stmt, requestParameters,
+                                Creator.DEFAULT_CREATOR);
                         break;
                     case SYNONYM_DROP:
                         handleDropSynonymStatement(metadataProvider, stmt, requestParameters);
                         break;
                     case CREATE_VIEW:
-                        handleCreateViewStatement(metadataProvider, stmt, stmtRewriter, requestParameters);
+                        handleCreateViewStatement(metadataProvider, stmt, stmtRewriter, requestParameters,
+                                Creator.DEFAULT_CREATOR);
                         break;
                     case VIEW_DROP:
                         handleViewDropStatement(metadataProvider, stmt, requestParameters);
@@ -656,7 +668,7 @@
     }
 
     protected void handleCreateDatabaseStatement(MetadataProvider metadataProvider, Statement stmt,
-            IRequestParameters requestParameters) throws Exception {
+            IRequestParameters requestParameters, Creator creator) throws Exception {
         CreateDatabaseStatement stmtCreateDatabase = (CreateDatabaseStatement) stmt;
         String database = stmtCreateDatabase.getDatabaseName().getValue();
         metadataProvider.validateDatabaseName(database, stmt.getSourceLocation());
@@ -665,14 +677,14 @@
         }
         lockUtil.createDatabaseBegin(lockManager, metadataProvider.getLocks(), database);
         try {
-            doCreateDatabaseStatement(metadataProvider, stmtCreateDatabase, requestParameters);
+            doCreateDatabaseStatement(metadataProvider, stmtCreateDatabase, requestParameters, creator);
         } finally {
             metadataProvider.getLocks().unlock();
         }
     }
 
     protected boolean doCreateDatabaseStatement(MetadataProvider mdProvider, CreateDatabaseStatement stmtCreateDatabase,
-            IRequestParameters requestParameters) throws Exception {
+            IRequestParameters requestParameters, Creator creator) throws Exception {
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         mdProvider.setMetadataTxnContext(mdTxnCtx);
         try {
@@ -687,8 +699,10 @@
                             databaseName);
                 }
             }
+
+            beforeTxnCommit(mdProvider, creator, EntityDetails.newDatabase(databaseName));
             MetadataManager.INSTANCE.addDatabase(mdTxnCtx,
-                    new Database(databaseName, false, MetadataUtil.PENDING_NO_OP));
+                    new Database(databaseName, false, MetadataUtil.PENDING_NO_OP, creator));
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             return true;
         } catch (Exception e) {
@@ -698,7 +712,7 @@
     }
 
     protected void handleCreateDataverseStatement(MetadataProvider metadataProvider, Statement stmt,
-            IRequestParameters requestParameters) throws Exception {
+            IRequestParameters requestParameters, Creator creator) throws Exception {
         CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
         DataverseName dvName = stmtCreateDataverse.getDataverseName();
         String dbName = stmtCreateDataverse.getDatabaseName();
@@ -709,7 +723,7 @@
         }
         lockUtil.createDataverseBegin(lockManager, metadataProvider.getLocks(), dbName, dvName);
         try {
-            doCreateDataverseStatement(metadataProvider, stmtCreateDataverse, requestParameters);
+            doCreateDataverseStatement(metadataProvider, stmtCreateDataverse, requestParameters, creator);
         } finally {
             metadataProvider.getLocks().unlock();
         }
@@ -717,7 +731,8 @@
 
     @SuppressWarnings("squid:S00112")
     protected boolean doCreateDataverseStatement(MetadataProvider metadataProvider,
-            CreateDataverseStatement stmtCreateDataverse, IRequestParameters requestParameters) throws Exception {
+            CreateDataverseStatement stmtCreateDataverse, IRequestParameters requestParameters, Creator creator)
+            throws Exception {
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
         try {
@@ -740,8 +755,9 @@
                             dvName);
                 }
             }
-            MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(),
-                    new Dataverse(dbName, dvName, stmtCreateDataverse.getFormat(), MetadataUtil.PENDING_NO_OP));
+            beforeTxnCommit(metadataProvider, creator, EntityDetails.newDataverse(dbName, dvName));
+            MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(), new Dataverse(dbName,
+                    dvName, stmtCreateDataverse.getFormat(), MetadataUtil.PENDING_NO_OP, creator));
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             return true;
         } catch (Exception e) {
@@ -788,7 +804,7 @@
     }
 
     public void handleCreateDatasetStatement(MetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
+            IHyracksClientConnection hcc, IRequestParameters requestParameters, Creator creator) throws Exception {
         DatasetDecl dd = (DatasetDecl) stmt;
         String datasetName = dd.getName().getValue();
         metadataProvider.validateDatabaseObjectName(dd.getNamespace(), datasetName, stmt.getSourceLocation());
@@ -838,7 +854,7 @@
         try {
             doCreateDatasetStatement(metadataProvider, dd, stmtActiveNamespace, datasetName, itemTypeNamespace,
                     itemTypeExpr, itemTypeName, metaItemTypeExpr, metaItemTypeNamespace, metaItemTypeName, hcc,
-                    requestParameters);
+                    requestParameters, creator);
             if (dd.getQuery() != null) {
                 final IResultSet resultSet = requestParameters.getResultSet();
                 final ResultDelivery resultDelivery = requestParameters.getResultProperties().getDelivery();
@@ -860,8 +876,8 @@
     protected Optional<? extends Dataset> doCreateDatasetStatement(MetadataProvider metadataProvider, DatasetDecl dd,
             Namespace namespace, String datasetName, Namespace itemTypeNamespace, TypeExpression itemTypeExpr,
             String itemTypeName, TypeExpression metaItemTypeExpr, Namespace metaItemTypeNamespace,
-            String metaItemTypeName, IHyracksClientConnection hcc, IRequestParameters requestParameters)
-            throws Exception {
+            String metaItemTypeName, IHyracksClientConnection hcc, IRequestParameters requestParameters,
+            Creator creator) throws Exception {
         DataverseName dataverseName = namespace.getDataverseName();
         String databaseName = namespace.getDatabaseName();
 
@@ -1034,7 +1050,7 @@
             dataset = (Dataset) createDataset(dd, databaseName, dataverseName, datasetName, itemTypeDatabaseName,
                     itemTypeDataverseName, itemTypeName, metaItemTypeDatabaseName, metaItemTypeDataverseName,
                     metaItemTypeName, dsType, compactionPolicy, compactionPolicyProperties, compressionScheme,
-                    datasetFormatInfo, datasetDetails, ngName);
+                    datasetFormatInfo, datasetDetails, ngName, creator);
             MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
 
             if (itemTypeIsInline) {
@@ -1068,6 +1084,8 @@
                     datasetName, requestParameters.isForceDropDataset());
             dataset.setPendingOp(MetadataUtil.PENDING_NO_OP);
             MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
+            beforeTxnCommit(metadataProvider, creator,
+                    EntityDetails.newDataset(databaseName, dataverseName, datasetName));
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
             if (bActiveTxn) {
@@ -1135,11 +1153,11 @@
             String metaItemTypeDatabase, DataverseName metaItemTypeDataverseName, String metaItemTypeName,
             DatasetType dsType, String compactionPolicy, Map<String, String> compactionPolicyProperties,
             String compressionScheme, DatasetFormatInfo datasetFormatInfo, IDatasetDetails datasetDetails,
-            String ngName) throws AlgebricksException {
+            String ngName, Creator creator) throws AlgebricksException {
         return new Dataset(database, dataverseName, datasetName, itemTypeDatabase, itemTypeDataverseName, itemTypeName,
                 metaItemTypeDatabase, metaItemTypeDataverseName, metaItemTypeName, ngName, compactionPolicy,
                 compactionPolicyProperties, datasetDetails, dd.getHints(), dsType, DatasetIdFactory.generateDatasetId(),
-                MetadataUtil.PENDING_ADD_OP, compressionScheme, datasetFormatInfo);
+                MetadataUtil.PENDING_ADD_OP, compressionScheme, datasetFormatInfo, creator);
     }
 
     protected Triple<Namespace, String, Boolean> extractDatasetItemTypeName(Namespace datasetNamespace,
@@ -1275,7 +1293,7 @@
     }
 
     public void handleCreateIndexStatement(MetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
+            IHyracksClientConnection hcc, IRequestParameters requestParameters, Creator creator) throws Exception {
         CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
         String datasetName = stmtCreateIndex.getDatasetName().getValue();
         String indexName = stmtCreateIndex.getIndexName().getValue();
@@ -1292,7 +1310,7 @@
                 fullTextConfigName);
         try {
             doCreateIndex(metadataProvider, stmtCreateIndex, databaseName, dataverseName, datasetName, hcc,
-                    requestParameters);
+                    requestParameters, creator);
         } finally {
             metadataProvider.getLocks().unlock();
         }
@@ -1300,7 +1318,7 @@
 
     protected void doCreateIndex(MetadataProvider metadataProvider, CreateIndexStatement stmtCreateIndex,
             String databaseName, DataverseName dataverseName, String datasetName, IHyracksClientConnection hcc,
-            IRequestParameters requestParameters) throws Exception {
+            IRequestParameters requestParameters, Creator creator) throws Exception {
         SourceLocation sourceLoc = stmtCreateIndex.getSourceLocation();
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
@@ -1625,10 +1643,11 @@
             }
 
             Index newIndex = new Index(databaseName, dataverseName, datasetName, indexName, indexType, indexDetails,
-                    stmtCreateIndex.isEnforced(), false, MetadataUtil.PENDING_ADD_OP);
+                    stmtCreateIndex.isEnforced(), false, MetadataUtil.PENDING_ADD_OP, creator);
 
             bActiveTxn = false; // doCreateIndexImpl() takes over the current transaction
-            doCreateIndexImpl(hcc, metadataProvider, ds, newIndex, jobFlags, sourceLoc);
+            EntityDetails entityDetails = EntityDetails.newIndex(databaseName, dataverseName, indexName);
+            doCreateIndexImpl(hcc, metadataProvider, ds, newIndex, jobFlags, sourceLoc, creator, entityDetails);
 
         } catch (Exception e) {
             if (bActiveTxn) {
@@ -1773,7 +1792,8 @@
     }
 
     private void doCreateIndexImpl(IHyracksClientConnection hcc, MetadataProvider metadataProvider, Dataset ds,
-            Index index, EnumSet<JobFlag> jobFlags, SourceLocation sourceLoc) throws Exception {
+            Index index, EnumSet<JobFlag> jobFlags, SourceLocation sourceLoc, Creator creator,
+            EntityDetails entityDetails) throws Exception {
         ProgressState progress = ProgressState.NO_PROGRESS;
         boolean bActiveTxn = true;
         MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
@@ -1852,6 +1872,8 @@
                         "Failed to create job spec for creating index '" + ds.getDatasetName() + "."
                                 + index.getIndexName() + "'");
             }
+            beforeTxnCommit(metadataProvider, creator, entityDetails);
+
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
             progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
@@ -2068,8 +2090,8 @@
             // second, inserting the database record with the PendingDropOp value into the 'Database' collection
             // Note: the delete operation fails if the database cannot be deleted due to metadata dependencies
             MetadataManager.INSTANCE.dropDatabase(mdTxnCtx, databaseName);
-            MetadataManager.INSTANCE.addDatabase(mdTxnCtx,
-                    new Database(databaseName, database.isSystemDatabase(), MetadataUtil.PENDING_DROP_OP));
+            MetadataManager.INSTANCE.addDatabase(mdTxnCtx, new Database(databaseName, database.isSystemDatabase(),
+                    MetadataUtil.PENDING_DROP_OP, database.getCreator()));
 
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
@@ -2254,8 +2276,8 @@
             // second, inserting the dataverse record with the PendingDropOp value into the DATAVERSE_DATASET
             // Note: the delete operation fails if the dataverse cannot be deleted due to metadata dependencies
             MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, databaseName, dataverseName);
-            MetadataManager.INSTANCE.addDataverse(mdTxnCtx,
-                    new Dataverse(databaseName, dataverseName, dv.getDataFormat(), MetadataUtil.PENDING_DROP_OP));
+            MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(databaseName, dataverseName,
+                    dv.getDataFormat(), MetadataUtil.PENDING_DROP_OP, dv.getCreator()));
 
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
@@ -2749,7 +2771,7 @@
     }
 
     public void handleCreateViewStatement(MetadataProvider metadataProvider, Statement stmt,
-            IStatementRewriter stmtRewriter, IRequestParameters requestParameters) throws Exception {
+            IStatementRewriter stmtRewriter, IRequestParameters requestParameters, Creator creator) throws Exception {
         CreateViewStatement cvs = (CreateViewStatement) stmt;
         String viewName = cvs.getViewName();
         metadataProvider.validateDatabaseObjectName(cvs.getNamespace(), viewName, stmt.getSourceLocation());
@@ -2783,7 +2805,7 @@
                 null, false, null, null, true, DatasetType.VIEW, null, metadataProvider);
         try {
             doCreateView(metadataProvider, cvs, databaseName, dataverseName, viewName, itemTypeDatabaseName,
-                    viewItemTypeDataverseName, viewItemTypeName, stmtRewriter, requestParameters);
+                    viewItemTypeDataverseName, viewItemTypeName, stmtRewriter, requestParameters, creator);
         } finally {
             metadataProvider.getLocks().unlock();
             metadataProvider.setDefaultNamespace(activeNamespace);
@@ -2793,7 +2815,7 @@
     protected CreateResult doCreateView(MetadataProvider metadataProvider, CreateViewStatement cvs, String databaseName,
             DataverseName dataverseName, String viewName, String itemTypeDatabaseName,
             DataverseName itemTypeDataverseName, String itemTypeName, IStatementRewriter stmtRewriter,
-            IRequestParameters requestParameters) throws Exception {
+            IRequestParameters requestParameters, Creator creator) throws Exception {
         SourceLocation sourceLoc = cvs.getSourceLocation();
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -2959,10 +2981,10 @@
             ViewDetails viewDetails = new ViewDetails(cvs.getViewBody(), dependencies, cvs.getDefaultNull(),
                     primaryKeyFields, foreignKeys, datetimeFormat, dateFormat, timeFormat);
 
-            Dataset view =
-                    new Dataset(databaseName, dataverseName, viewName, itemTypeDatabaseName, itemTypeDataverseName,
-                            itemTypeName, MetadataConstants.METADATA_NODEGROUP_NAME, "", Collections.emptyMap(),
-                            viewDetails, Collections.emptyMap(), DatasetType.VIEW, 0, MetadataUtil.PENDING_NO_OP);
+            Dataset view = new Dataset(databaseName, dataverseName, viewName, itemTypeDatabaseName,
+                    itemTypeDataverseName, itemTypeName, MetadataConstants.METADATA_NODEGROUP_NAME, "",
+                    Collections.emptyMap(), viewDetails, Collections.emptyMap(), DatasetType.VIEW, 0,
+                    MetadataUtil.PENDING_NO_OP, creator);
             if (existingDataset == null) {
                 if (itemTypeIsInline) {
                     MetadataManager.INSTANCE.addDatatype(mdTxnCtx, itemTypeEntity);
@@ -2974,6 +2996,7 @@
                 }
                 MetadataManager.INSTANCE.updateDataset(mdTxnCtx, view);
             }
+            beforeTxnCommit(metadataProvider, creator, EntityDetails.newView(databaseName, dataverseName, viewName));
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             return existingDataset != null ? CreateResult.REPLACED : CreateResult.CREATED;
         } catch (Exception e) {
@@ -3065,7 +3088,7 @@
     }
 
     public void handleCreateFunctionStatement(MetadataProvider metadataProvider, Statement stmt,
-            IStatementRewriter stmtRewriter, IRequestParameters requestParameters) throws Exception {
+            IStatementRewriter stmtRewriter, IRequestParameters requestParameters, Creator creator) throws Exception {
         CreateFunctionStatement cfs = (CreateFunctionStatement) stmt;
         FunctionSignature signature = cfs.getFunctionSignature();
         DataverseName funDataverse = signature.getDataverseName();
@@ -3098,7 +3121,7 @@
         lockUtil.createFunctionBegin(lockManager, metadataProvider.getLocks(), databaseName, dataverseName,
                 signature.getName(), libraryDatabaseName, libraryDataverseName, libraryName);
         try {
-            doCreateFunction(metadataProvider, cfs, signature, stmtRewriter, requestParameters);
+            doCreateFunction(metadataProvider, cfs, signature, stmtRewriter, requestParameters, creator);
         } finally {
             metadataProvider.getLocks().unlock();
             metadataProvider.setDefaultNamespace(activeNamespace);
@@ -3106,8 +3129,8 @@
     }
 
     protected CreateResult doCreateFunction(MetadataProvider metadataProvider, CreateFunctionStatement cfs,
-            FunctionSignature functionSignature, IStatementRewriter stmtRewriter, IRequestParameters requestParameters)
-            throws Exception {
+            FunctionSignature functionSignature, IStatementRewriter stmtRewriter, IRequestParameters requestParameters,
+            Creator creator) throws Exception {
         DataverseName dataverseName = functionSignature.getDataverseName();
         String databaseName = functionSignature.getDatabaseName();
         SourceLocation sourceLoc = cfs.getSourceLocation();
@@ -3224,7 +3247,7 @@
                 function = new Function(functionSignature, paramNames, paramTypes, returnTypeSignature, null,
                         FunctionKind.SCALAR.toString(), library.getLanguage(), libraryDatabaseName,
                         libraryDataverseName, libraryName, externalIdentifier, cfs.getNullCall(),
-                        cfs.getDeterministic(), cfs.getResources(), dependencies);
+                        cfs.getDeterministic(), cfs.getResources(), dependencies, creator);
             } else {
                 List<Pair<VarIdentifier, TypeExpression>> paramList = cfs.getParameters();
                 int paramCount = paramList.size();
@@ -3263,7 +3286,7 @@
                 newInlineTypes = Collections.emptyMap();
                 function = new Function(functionSignature, paramNames, null, null, cfs.getFunctionBody(),
                         FunctionKind.SCALAR.toString(), compilationProvider.getParserFactory().getLanguage(), null,
-                        null, null, null, null, null, null, dependencies);
+                        null, null, null, null, null, null, dependencies, creator);
             }
 
             if (existingFunction == null) {
@@ -3289,6 +3312,8 @@
                 }
                 MetadataManager.INSTANCE.updateFunction(mdTxnCtx, function);
             }
+            beforeTxnCommit(metadataProvider, creator,
+                    EntityDetails.newFunction(databaseName, dataverseName, function.getName(), function.getArity()));
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Installed function: " + functionSignature);
@@ -3809,7 +3834,7 @@
     }
 
     protected void handleCreateSynonymStatement(MetadataProvider metadataProvider, Statement stmt,
-            IRequestParameters requestParameters) throws Exception {
+            IRequestParameters requestParameters, Creator creator) throws Exception {
         CreateSynonymStatement css = (CreateSynonymStatement) stmt;
         metadataProvider.validateDatabaseObjectName(css.getNamespace(), css.getSynonymName(), css.getSourceLocation());
         Namespace stmtActiveNamespace = getActiveNamespace(css.getNamespace());
@@ -3823,14 +3848,16 @@
         }
         lockUtil.createSynonymBegin(lockManager, metadataProvider.getLocks(), databaseName, dataverseName, synonymName);
         try {
-            doCreateSynonym(metadataProvider, css, stmtActiveNamespace, synonymName, objectNamespace, objectName);
+            doCreateSynonym(metadataProvider, css, stmtActiveNamespace, synonymName, objectNamespace, objectName,
+                    creator);
         } finally {
             metadataProvider.getLocks().unlock();
         }
     }
 
     protected CreateResult doCreateSynonym(MetadataProvider metadataProvider, CreateSynonymStatement css,
-            Namespace namespace, String synonymName, Namespace objectNamespace, String objectName) throws Exception {
+            Namespace namespace, String synonymName, Namespace objectNamespace, String objectName, Creator creator)
+            throws Exception {
         String databaseName = namespace.getDatabaseName();
         DataverseName dataverseName = namespace.getDataverseName();
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
@@ -3855,8 +3882,10 @@
                 throw new CompilationException(ErrorCode.SYNONYM_EXISTS, css.getSourceLocation(), synonymName);
             }
             synonym = new Synonym(databaseName, dataverseName, synonymName, objectNamespace.getDatabaseName(),
-                    objectNamespace.getDataverseName(), objectName);
+                    objectNamespace.getDataverseName(), objectName, creator);
             MetadataManager.INSTANCE.addSynonym(metadataProvider.getMetadataTxnContext(), synonym);
+            beforeTxnCommit(metadataProvider, creator,
+                    EntityDetails.newSynonym(databaseName, dataverseName, synonymName));
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             return CreateResult.CREATED;
         } catch (Exception e) {
@@ -4089,6 +4118,37 @@
                         ExternalDataConstants.WRITER_SUPPORTED_ADAPTERS, copyTo.getSourceLocation(), mdTxnCtx,
                         metadataProvider));
 
+                if (ExternalDataConstants.FORMAT_PARQUET
+                        .equalsIgnoreCase(edd.getProperties().get(ExternalDataConstants.KEY_FORMAT))) {
+                    if (copyTo.getType() != null) {
+                        DataverseName dataverseName =
+                                DataverseName.createFromCanonicalForm(ExternalDataConstants.DUMMY_DATAVERSE_NAME);
+                        IAType iaType = translateType(ExternalDataConstants.DUMMY_DATABASE_NAME, dataverseName,
+                                ExternalDataConstants.DUMMY_TYPE_NAME, copyTo.getType(), mdTxnCtx);
+                        edd.getProperties().put(ExternalDataConstants.PARQUET_SCHEMA_KEY,
+                                SchemaConverterVisitor.convertToParquetSchemaString((ARecordType) iaType));
+                    }
+                }
+
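+                // For CSV output a schema is mandatory: derive it from the TYPE expression or the
+                // AS item type, then verify every field is writable as CSV.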
+                if (ExternalDataConstants.FORMAT_CSV_LOWER_CASE
+                        .equalsIgnoreCase(edd.getProperties().get(ExternalDataConstants.KEY_FORMAT))) {
+                    DataverseName dataverseName =
+                            DataverseName.createFromCanonicalForm(ExternalDataConstants.DUMMY_DATAVERSE_NAME);
+                    IAType iaType;
+                    if (copyTo.getType() != null) {
+                        iaType = translateType(ExternalDataConstants.DUMMY_DATABASE_NAME, dataverseName,
+                                ExternalDataConstants.DUMMY_TYPE_NAME, copyTo.getType(), mdTxnCtx);
+                    } else if (copyTo.getTypeExpressionItemType() != null) {
+                        iaType = translateType(ExternalDataConstants.DUMMY_DATABASE_NAME, dataverseName,
+                                ExternalDataConstants.DUMMY_TYPE_NAME, copyTo.getTypeExpressionItemType(), mdTxnCtx);
+                    } else {
+                        throw new CompilationException(ErrorCode.COMPILATION_ERROR,
+                                "TYPE/AS Expression is required for csv format");
+                    }
+                    ARecordType recordType = (ARecordType) iaType;
+                    validateCSVSchema(recordType);
+                    edd.setItemType(recordType);
+                }
                 Map<VarIdentifier, IAObject> externalVars = createExternalVariables(copyTo, stmtParams);
                 // Query Rewriting (happens under the same ongoing metadata transaction)
                 LangRewritingContext langRewritingContext = createLangRewritingContext(metadataProvider,
@@ -4851,7 +4911,7 @@
                     dsDetails.getKeySourceIndicator(), dsDetails.getPrimaryKeyType(), sampleCardinalityTarget, 0, 0,
                     sampleSeed, Collections.emptyMap());
             newIndexPendingAdd = new Index(databaseName, dataverseName, datasetName, newIndexName, sampleIndexType,
-                    newIndexDetailsPendingAdd, false, false, MetadataUtil.PENDING_ADD_OP);
+                    newIndexDetailsPendingAdd, false, false, MetadataUtil.PENDING_ADD_OP, Creator.DEFAULT_CREATOR);
 
             // #. add a new index with PendingAddOp
             MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), newIndexPendingAdd);
@@ -4893,7 +4953,7 @@
                     dsDetails.getKeySourceIndicator(), dsDetails.getPrimaryKeyType(), sampleCardinalityTarget,
                     stats.getCardinality(), stats.getAvgTupleSize(), sampleSeed, stats.getIndexesStats());
             Index newIndexFinal = new Index(databaseName, dataverseName, datasetName, newIndexName, sampleIndexType,
-                    newIndexDetailsFinal, false, false, MetadataUtil.PENDING_NO_OP);
+                    newIndexDetailsFinal, false, false, MetadataUtil.PENDING_NO_OP, Creator.DEFAULT_CREATOR);
 
             // #. begin new metadataTxn
             mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
@@ -5157,7 +5217,7 @@
             MetadataManager.INSTANCE.addIndex(mdTxnCtx,
                     new Index(databaseName, dataverseName, datasetName, indexName, index.getIndexType(),
                             index.getIndexDetails(), index.isEnforced(), index.isPrimaryIndex(),
-                            MetadataUtil.PENDING_DROP_OP));
+                            MetadataUtil.PENDING_DROP_OP, index.getCreator()));
         }
     }
 
@@ -5456,6 +5516,7 @@
                             participatingDatasetIds, numParticipatingNodes, numParticipatingPartitions));
                 }
             }
+
             jobId = runTrackJob(hcc, jobSpec, jobFlags, reqId, requestParameters.getClientContextId(), clientRequest);
             if (jId != null) {
                 jId.setValue(jobId);
@@ -5800,10 +5861,35 @@
         }
     }
 
+    protected void beforeTxnCommit(MetadataProvider metadataProvider, Creator creator, EntityDetails entityDetails)
+            throws AlgebricksException {
+        // No-op by default; subclasses may override to act on the created entity before the metadata txn commits.
+    }
+
     protected enum CreateResult {
         NOOP,
         CREATED,
         REPLACED
     }
 
+    private void validateCSVSchema(ARecordType schema) throws CompilationException {
+        final List<String> expectedFieldNames = Arrays.asList(schema.getFieldNames());
+        final List<IAType> expectedFieldTypes = Arrays.asList(schema.getFieldTypes());
+        final int size = expectedFieldNames.size();
+        for (int i = 0; i < size; ++i) {
+            IAType expectedIAType = expectedFieldTypes.get(i);
+            if (!ExternalDataConstants.CSV_WRITER_SUPPORTED_DATA_TYPES.contains(expectedIAType.getTypeTag())) {
+                if (expectedIAType.getTypeTag().equals(ATypeTag.UNION)) {
+                    AUnionType unionType = (AUnionType) expectedIAType;
+                    ATypeTag actualTypeTag = unionType.getActualType().getTypeTag();
+                    if (!ExternalDataConstants.CSV_WRITER_SUPPORTED_DATA_TYPES.contains(actualTypeTag)) {
+                        throw new CompilationException(ErrorCode.TYPE_UNSUPPORTED_CSV_WRITE, actualTypeTag.toString());
+                    }
+                } else {
+                    throw new CompilationException(ErrorCode.TYPE_UNSUPPORTED_CSV_WRITE,
+                            expectedIAType.getTypeTag().toString());
+                }
+            }
+        }
+    }
 }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
index 0b7e7d0..6928b64 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
@@ -91,6 +91,7 @@
 import org.apache.asterix.common.metadata.NamespacePathResolver;
 import org.apache.asterix.common.metadata.NamespaceResolver;
 import org.apache.asterix.common.replication.INcLifecycleCoordinator;
+import org.apache.asterix.common.utils.IdentifierUtil;
 import org.apache.asterix.common.utils.Servlets;
 import org.apache.asterix.external.adapter.factory.AdapterFactoryService;
 import org.apache.asterix.file.StorageComponentProvider;
@@ -165,7 +166,7 @@
         ccServiceCtx.setMessageBroker(new CCMessageBroker(controllerService));
         ccServiceCtx.setPersistedResourceRegistry(new PersistedResourceRegistry());
         configureLoggingLevel(ccServiceCtx.getAppConfig().getLoggingLevel(ExternalProperties.Option.LOG_LEVEL));
-        LOGGER.info("Starting Asterix cluster controller");
+        LOGGER.info("Starting {} cluster controller", IdentifierUtil.productName());
         String strIP = ccServiceCtx.getCCContext().getClusterControllerInfo().getClientNetAddress();
         int port = ccServiceCtx.getCCContext().getClusterControllerInfo().getClientNetPort();
         hcc = new HyracksConnection(strIP, port,
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
index 0ff8796..6f8126b 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
@@ -79,6 +79,7 @@
 import org.apache.asterix.common.transactions.IRecoveryManager;
 import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
 import org.apache.asterix.common.transactions.IRecoveryManagerFactory;
+import org.apache.asterix.common.utils.IdentifierUtil;
 import org.apache.asterix.common.utils.PrintUtil;
 import org.apache.asterix.common.utils.Servlets;
 import org.apache.asterix.common.utils.StorageConstants;
@@ -148,9 +149,7 @@
             throw new IllegalArgumentException("Unrecognized argument(s): " + Arrays.toString(args));
         }
         nodeId = this.ncServiceCtx.getNodeId();
-        if (LOGGER.isInfoEnabled()) {
-            LOGGER.info("Starting Asterix node controller: " + nodeId);
-        }
+        LOGGER.info("Starting {} node controller: {}", IdentifierUtil.productName(), nodeId);
         final NodeControllerService controllerService = (NodeControllerService) ncServiceCtx.getControllerService();
 
         if (System.getProperty("java.rmi.server.hostname") == null) {
@@ -280,9 +279,7 @@
         if (!stopInitiated) {
             runtimeContext.setShuttingdown(true);
             stopInitiated = true;
-            if (LOGGER.isInfoEnabled()) {
-                LOGGER.info("Stopping Asterix node controller: " + nodeId);
-            }
+            LOGGER.info("Stopping {} node controller: {}", IdentifierUtil.productName(), nodeId);
 
             webManager.stop();
 
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/util/MetadataBuiltinFunctions.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/util/MetadataBuiltinFunctions.java
index 9d39088..ea8d5fc 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/util/MetadataBuiltinFunctions.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/util/MetadataBuiltinFunctions.java
@@ -31,7 +31,9 @@
 import org.apache.asterix.app.function.StorageComponentsRewriter;
 import org.apache.asterix.app.function.TPCDSAllTablesDataGeneratorRewriter;
 import org.apache.asterix.app.function.TPCDSSingleTableDataGeneratorRewriter;
+import org.apache.asterix.app.function.collectionsize.StorageSizeRewriter;
 import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.typecomputer.impl.AInt64TypeComputer;
 import org.apache.asterix.om.utils.RecordUtil;
 
 public class MetadataBuiltinFunctions {
@@ -106,6 +108,10 @@
                 true);
         BuiltinFunctions.addUnnestFun(QueryPartitionRewriter.QUERY_PARTITION, false);
         BuiltinFunctions.addDatasourceFunction(QueryPartitionRewriter.QUERY_PARTITION, QueryPartitionRewriter.INSTANCE);
+        // storage size
+        BuiltinFunctions.addFunction(StorageSizeRewriter.STORAGE_SIZE, AInt64TypeComputer.INSTANCE, true);
+        BuiltinFunctions.addUnnestFun(StorageSizeRewriter.STORAGE_SIZE, true);
+        BuiltinFunctions.addDatasourceFunction(StorageSizeRewriter.STORAGE_SIZE, StorageSizeRewriter.INSTANCE);
     }
 
     private MetadataBuiltinFunctions() {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/StorageUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/StorageUtil.java
new file mode 100644
index 0000000..3eb9d08
--- /dev/null
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/StorageUtil.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.utils;
+
+import static org.apache.asterix.common.api.IClusterManagementWork.ClusterState.ACTIVE;
+import static org.apache.asterix.common.api.IClusterManagementWork.ClusterState.REBALANCE_REQUIRED;
+import static org.apache.asterix.common.exceptions.ErrorCode.REJECT_BAD_CLUSTER_STATE;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.asterix.app.message.StorageSizeRequestMessage;
+import org.apache.asterix.common.api.IClusterManagementWork;
+import org.apache.asterix.common.api.IMetadataLockManager;
+import org.apache.asterix.common.config.DatasetConfig;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.exceptions.RuntimeDataException;
+import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.asterix.common.metadata.MetadataConstants;
+import org.apache.asterix.common.metadata.MetadataUtil;
+import org.apache.asterix.messaging.CCMessageBroker;
+import org.apache.asterix.metadata.MetadataManager;
+import org.apache.asterix.metadata.MetadataTransactionContext;
+import org.apache.asterix.metadata.declared.MetadataProvider;
+import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.entities.Index;
+import org.apache.hyracks.api.util.InvokeUtil;
+
+public class StorageUtil {
+
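+    // Validates cluster state and the target collection (and optional index), then synchronously
+    // asks every node controller for its storage size and returns the combined result.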
+    public static long getCollectionSize(ICcApplicationContext appCtx, String database, DataverseName dataverse,
+            String collection, String index) throws Exception {
+        IClusterManagementWork.ClusterState state = appCtx.getClusterStateManager().getState();
+        if (!(state == ACTIVE || state == REBALANCE_REQUIRED)) {
+            throw new RuntimeDataException(REJECT_BAD_CLUSTER_STATE, state);
+        }
+
+        if (!appCtx.getNamespaceResolver().isUsingDatabase()) {
+            database = MetadataConstants.DEFAULT_DATABASE;
+        }
+
+        IMetadataLockManager lockManager = appCtx.getMetadataLockManager();
+        MetadataProvider metadataProvider = MetadataProvider.createWithDefaultNamespace(appCtx);
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        try {
+            lockManager.acquireDatabaseReadLock(metadataProvider.getLocks(), database);
+            lockManager.acquireDataverseReadLock(metadataProvider.getLocks(), database, dataverse);
+            lockManager.acquireDatasetReadLock(metadataProvider.getLocks(), database, dataverse, collection);
+            Dataset dataset = metadataProvider.findDataset(database, dataverse, collection);
+            if (dataset == null) {
+                throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, collection,
+                        MetadataUtil.dataverseName(database, dataverse, metadataProvider.isUsingDatabase()));
+            }
+
+            if (dataset.getDatasetType() != DatasetConfig.DatasetType.INTERNAL) {
+                throw new CompilationException(ErrorCode.STORAGE_SIZE_NOT_APPLICABLE_TO_TYPE, dataset.getDatasetType());
+            }
+
+            if (index != null) {
+                Index idx = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), database,
+                        dataverse, collection, index);
+                if (idx == null) {
+                    throw new CompilationException(ErrorCode.UNKNOWN_INDEX, index);
+                }
+            }
+
+            final List<String> ncs = new ArrayList<>(appCtx.getClusterStateManager().getParticipantNodes());
+            CCMessageBroker messageBroker = (CCMessageBroker) appCtx.getServiceContext().getMessageBroker();
+
+            long reqId = messageBroker.newRequestId();
+            List<StorageSizeRequestMessage> requests = new ArrayList<>();
+            for (int i = 0; i < ncs.size(); i++) {
+                requests.add(new StorageSizeRequestMessage(reqId, database, dataverse.getCanonicalForm(), collection,
+                        index));
+            }
+            return (long) messageBroker.sendSyncRequestToNCs(reqId, ncs, requests, TimeUnit.SECONDS.toMillis(60), true);
+        } finally {
+            InvokeUtil.tryWithCleanups(() -> MetadataManager.INSTANCE.commitTransaction(mdTxnCtx),
+                    () -> metadataProvider.getLocks().unlock());
+        }
+    }
+}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/common/LocalCloudUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/common/LocalCloudUtil.java
index 9e3fdc4..b50d2f2 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/common/LocalCloudUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/common/LocalCloudUtil.java
@@ -57,6 +57,10 @@
     }
 
     public static S3Mock startS3CloudEnvironment(boolean cleanStart) {
+        return startS3CloudEnvironment(cleanStart, false);
+    }
+
+    public static S3Mock startS3CloudEnvironment(boolean cleanStart, boolean createPlaygroundContainer) {
         if (cleanStart) {
             FileUtils.deleteQuietly(new File(MOCK_FILE_BACKEND));
         }
@@ -79,6 +83,12 @@
         S3Client client = builder.build();
         client.createBucket(CreateBucketRequest.builder().bucket(CLOUD_STORAGE_BUCKET).build());
         LOGGER.info("Created bucket {} for cloud storage", CLOUD_STORAGE_BUCKET);
+
+        // created for convenience: some tests that are not primarily about external data still run an external collection test against this bucket
+        if (createPlaygroundContainer) {
+            client.createBucket(CreateBucketRequest.builder().bucket("playground").build());
+            LOGGER.info("Created bucket {}", "playground");
+        }
         client.close();
         return s3MockServer;
     }
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
index 8aa25ad..aaee17e 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
@@ -53,6 +53,7 @@
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.NoOpLSMTupleFilterCallbackFactory;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.metadata.utils.DatasetUtil;
 import org.apache.asterix.metadata.utils.SplitsAndConstraintsUtil;
 import org.apache.asterix.om.types.ARecordType;
@@ -500,7 +501,7 @@
         PrimaryIndexInfo primaryIndexInfo = new PrimaryIndexInfo(dataset, primaryKeyTypes, recordType, metaType,
                 mergePolicy.first, mergePolicy.second, filterFields, primaryKeyIndexes, primaryKeyIndicators);
         Dataverse dataverse = new Dataverse(dataset.getDatabaseName(), dataset.getDataverseName(),
-                NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
+                NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP, Creator.DEFAULT_CREATOR);
         Namespace namespace = new Namespace(dataverse.getDatabaseName(), dataverse.getDataverseName());
         MetadataProvider mdProvider = MetadataProvider.create(
                 (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), namespace);
@@ -528,7 +529,7 @@
         MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         Dataverse dataverse =
                 new Dataverse(primaryIndexInfo.dataset.getDatabaseName(), primaryIndexInfo.dataset.getDataverseName(),
-                        NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
+                        NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP, Creator.DEFAULT_CREATOR);
         Namespace namespace = new Namespace(dataverse.getDatabaseName(), dataverse.getDataverseName());
         MetadataProvider mdProvider = MetadataProvider.create(
                 (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), namespace);
@@ -746,7 +747,7 @@
             }
             index = Index.createPrimaryIndex(dataset.getDatabaseName(), dataset.getDataverseName(),
                     dataset.getDatasetName(), keyFieldNames, primaryKeyIndicators, keyFieldTypes,
-                    MetadataUtil.PENDING_NO_OP);
+                    MetadataUtil.PENDING_NO_OP, dataset.getCreator());
             List<String> nodes = Collections.singletonList(ExecutionTestUtil.integrationUtil.ncs[0].getId());
             CcApplicationContext appCtx =
                     (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveEventsListenerTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveEventsListenerTest.java
index 841e81b..460565c 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveEventsListenerTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveEventsListenerTest.java
@@ -114,9 +114,9 @@
         handler = new ActiveNotificationHandler();
         allDatasets = new ArrayList<>();
         firstDataset = new Dataset(database, dataverseName, "firstDataset", recordTypeDatabaseName, null, null, null,
-                null, null, null, null, null, 0, 0);
+                null, null, null, null, null, 0, 0, null);
         secondDataset = new Dataset(database, dataverseName, "secondDataset", recordTypeDatabaseName, null, null, null,
-                null, null, null, null, null, 0, 0);
+                null, null, null, null, null, 0, 0, null);
         allDatasets.add(firstDataset);
         allDatasets.add(secondDataset);
         AtomicInteger threadCounter = new AtomicInteger(0);
@@ -992,7 +992,7 @@
                 new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.RECOVERING));
         recoveringSubscriber.sync();
         Dataset newDataset = new Dataset(database, dataverseName, "newDataset", recordTypeDatabaseName, null, null,
-                null, null, null, null, null, null, 0, 0);
+                null, null, null, null, null, null, 0, 0, null);
         Action add = users[1].addDataset(newDataset, listener);
         listener.allowStep();
         runningSubscriber.sync();
@@ -1019,7 +1019,7 @@
         recoveringSubscriber.sync();
         tempFailSubscriber = new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.TEMPORARILY_FAILED));
         Dataset newDataset = new Dataset(database, dataverseName, "newDataset", recordTypeDatabaseName, null, null,
-                null, null, null, null, null, null, 0, 0);
+                null, null, null, null, null, null, 0, 0, null);
         Action add = users[1].addDataset(newDataset, listener);
         listener.allowStep();
         tempFailSubscriber.sync();
@@ -1046,7 +1046,7 @@
         recoveringSubscriber.sync();
         tempFailSubscriber = new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.TEMPORARILY_FAILED));
         Dataset newDataset = new Dataset(database, dataverseName, "newDataset", recordTypeDatabaseName, null, null,
-                null, null, null, null, null, null, 0, 0);
+                null, null, null, null, null, null, 0, 0, null);
         Action add = users[1].addDataset(newDataset, listener);
         listener.allowStep();
         tempFailSubscriber.sync();
@@ -1065,7 +1065,7 @@
                 new WaitForStateSubscriber(listener, Collections.singleton(ActivityState.STARTING));
         subscriber.sync();
         Dataset newDataset = new Dataset(database, dataverseName, "newDataset", recordTypeDatabaseName, null, null,
-                null, null, null, null, null, null, 0, 0);
+                null, null, null, null, null, null, 0, 0, null);
         Action createDatasetAction = users[1].addDataset(newDataset, listener);
         listener.allowStep();
         startAction.sync();
@@ -1081,7 +1081,7 @@
     public void testCreateNewDatasetWhileRunning() throws Exception {
         testStartWhenStartSucceed();
         Dataset newDataset = new Dataset(database, dataverseName, "newDataset", recordTypeDatabaseName, null, null,
-                null, null, null, null, null, null, 0, 0);
+                null, null, null, null, null, null, 0, 0, null);
         Action createDatasetAction = users[1].addDataset(newDataset, listener);
         createDatasetAction.sync();
         assertFailure(createDatasetAction, ErrorCode.CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY);
@@ -1101,7 +1101,7 @@
                 new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.SUSPENDING, ActivityState.SUSPENDED));
         subscriber.sync();
         Dataset newDataset = new Dataset(database, dataverseName, "newDataset", recordTypeDatabaseName, null, null,
-                null, null, null, null, null, null, 0, 0);
+                null, null, null, null, null, null, 0, 0, null);
         Action createDatasetAction = users[0].addDataset(newDataset, listener);
         listener.allowStep();
         listener.allowStep();
@@ -1120,7 +1120,7 @@
         testRecoveryFailureAfterOneAttemptCompilationFailure();
         Assert.assertEquals(ActivityState.STOPPED, listener.getState());
         Dataset newDataset = new Dataset(database, dataverseName, "newDataset", recordTypeDatabaseName, null, null,
-                null, null, null, null, null, null, 0, 0);
+                null, null, null, null, null, null, 0, 0, null);
         Action createDatasetAction = users[0].addDataset(newDataset, listener);
         createDatasetAction.sync();
         assertSuccess(createDatasetAction);
@@ -1553,7 +1553,7 @@
         query.sync();
         assertSuccess(query);
         Dataset newDataset = new Dataset(database, dataverseName, "newDataset", recordTypeDatabaseName, null, null,
-                null, null, null, null, null, null, 0, 0);
+                null, null, null, null, null, null, 0, 0, null);
         Action addDataset = users[1].addDataset(newDataset, listener);
         // blocked by suspension
         Assert.assertFalse(addDataset.isDone());
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageAzTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageAzTest.java
new file mode 100644
index 0000000..508810d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageAzTest.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.test.cloud_storage;
+
+import static org.apache.asterix.api.common.LocalCloudUtil.CLOUD_STORAGE_BUCKET;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Objects;
+import java.util.Random;
+
+import org.apache.asterix.common.config.GlobalConfig;
+import org.apache.asterix.test.common.TestExecutor;
+import org.apache.asterix.test.runtime.LangExecutionUtil;
+import org.apache.asterix.testframework.context.TestCaseContext;
+import org.apache.asterix.testframework.xml.Description;
+import org.apache.asterix.testframework.xml.TestCase;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.AfterClass;
+import org.junit.Assume;
+import org.junit.BeforeClass;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.MethodSorters;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import com.azure.storage.blob.BlobServiceClient;
+import com.azure.storage.blob.BlobServiceClientBuilder;
+import com.azure.storage.common.StorageSharedKeyCredential;
+
+/**
+ * Runs tests against an Azure Blob-backed cloud deployment environment.
+ */
+@RunWith(Parameterized.class)
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class CloudStorageAzTest {
+
+    private static final Logger LOGGER = LogManager.getLogger();
+
+    private final TestCaseContext tcCtx;
+    private static final String SUITE_TESTS = "testsuite_cloud_storage.xml";
+    private static final String ONLY_TESTS = "testsuite_cloud_storage_only.xml";
+    private static final String CONFIG_FILE_NAME = "src/test/resources/cc-cloud-storage-azblob.conf";
+    private static final String DELTA_RESULT_PATH = "results_cloud";
+    private static final String EXCLUDED_TESTS = "MP";
+
+    public CloudStorageAzTest(TestCaseContext tcCtx) {
+        this.tcCtx = tcCtx;
+    }
+
+    @BeforeClass
+    public static void setUp() throws Exception {
+        String endpointString = "http://127.0.0.1:15055/devstoreaccount1/" + CLOUD_STORAGE_BUCKET;
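+        // well-known development-only credentials for the local Azure storage emulator (Azurite "devstoreaccount1")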
+        final String accKey =
+                "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==";
+        final String accName = "devstoreaccount1";
+
+        BlobServiceClient blobServiceClient = new BlobServiceClientBuilder().endpoint(endpointString)
+                .credential(new StorageSharedKeyCredential(accName, accKey)).buildClient();
+
+        cleanup(blobServiceClient);
+        initialize(blobServiceClient);
+
+        // the GCS variant closes its Storage client at this point; BlobServiceClient exposes no close() and needs none
+
+        TestExecutor testExecutor = new TestExecutor(DELTA_RESULT_PATH);
+        testExecutor.executorId = "cloud";
+        testExecutor.stripSubstring = "//DB:";
+        LangExecutionUtil.setUp(CONFIG_FILE_NAME, testExecutor);
+        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, CONFIG_FILE_NAME);
+    }
+
+    @AfterClass
+    public static void tearDown() throws Exception {
+        LangExecutionUtil.tearDown();
+    }
+
+    @Parameters(name = "CloudStorageAzBlobTest {index}: {0}")
+    public static Collection<Object[]> tests() throws Exception {
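+        // log the selection seed so a failing random sample can be reproduced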
+        long seed = System.nanoTime();
+        Random random = new Random(seed);
+        LOGGER.info("CloudStorageAzBlobTest seed {}", seed);
+        Collection<Object[]> tests = LangExecutionUtil.tests(ONLY_TESTS, SUITE_TESTS);
+        List<Object[]> selected = new ArrayList<>();
+        for (Object[] test : tests) {
+            if (!Objects.equals(((TestCaseContext) test[0]).getTestGroups()[0].getName(), "sqlpp_queries")) {
+                selected.add(test);
+            }
+            // otherwise, randomly keep ~10% of the sqlpp_queries tests
+            else if (random.nextInt(10) == 0) {
+                selected.add(test);
+            }
+        }
+        return selected;
+    }
+
+    @Test
+    public void test() throws Exception {
+        List<TestCase.CompilationUnit> cu = tcCtx.getTestCase().getCompilationUnit();
+        Assume.assumeTrue(cu.size() > 1 || !EXCLUDED_TESTS.equals(getText(cu.get(0).getDescription())));
+        LangExecutionUtil.test(tcCtx);
+    }
+
+    private static String getText(Description description) {
+        return description == null ? "" : description.getValue();
+    }
+
+    private static void cleanup(BlobServiceClient blobServiceClient) {
+        blobServiceClient.deleteBlobContainerIfExists(CLOUD_STORAGE_BUCKET);
+    }
+
+    private static void initialize(BlobServiceClient blobServiceClient) {
+        blobServiceClient.createBlobContainerIfNotExists(CLOUD_STORAGE_BUCKET);
+    }
+}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageGCSTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageGCSTest.java
index 3a03445..c532d9d 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageGCSTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageGCSTest.java
@@ -21,9 +21,13 @@
 import static org.apache.asterix.api.common.LocalCloudUtil.CLOUD_STORAGE_BUCKET;
 import static org.apache.asterix.api.common.LocalCloudUtil.MOCK_SERVER_REGION;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
+import java.util.Objects;
+import java.util.Random;
 
+import org.apache.asterix.api.common.LocalCloudUtil;
 import org.apache.asterix.common.config.GlobalConfig;
 import org.apache.asterix.test.common.TestExecutor;
 import org.apache.asterix.test.runtime.LangExecutionUtil;
@@ -36,7 +40,6 @@
 import org.junit.Assume;
 import org.junit.BeforeClass;
 import org.junit.FixMethodOrder;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.MethodSorters;
@@ -55,7 +58,6 @@
  */
 @RunWith(Parameterized.class)
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
-@Ignore
 public class CloudStorageGCSTest {
 
     private static final Logger LOGGER = LogManager.getLogger();
@@ -75,6 +77,7 @@
 
     @BeforeClass
     public static void setUp() throws Exception {
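+        // also start the S3 mock so the shared playground bucket exists for external-collection tests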
+        LocalCloudUtil.startS3CloudEnvironment(true, true);
         Storage storage = StorageOptions.newBuilder().setHost(MOCK_SERVER_HOSTNAME)
                 .setCredentials(NoCredentials.getInstance()).setProjectId(MOCK_SERVER_PROJECT_ID).build().getService();
         cleanup(storage);
@@ -94,7 +97,21 @@
 
     @Parameters(name = "CloudStorageGCSTest {index}: {0}")
     public static Collection<Object[]> tests() throws Exception {
-        return LangExecutionUtil.tests(ONLY_TESTS, SUITE_TESTS);
+        long seed = System.nanoTime();
+        Random random = new Random(seed);
+        LOGGER.info("CloudStorageGCSTest seed {}", seed);
+        Collection<Object[]> tests = LangExecutionUtil.tests(ONLY_TESTS, SUITE_TESTS);
+        List<Object[]> selected = new ArrayList<>();
+        for (Object[] test : tests) {
+            if (!Objects.equals(((TestCaseContext) test[0]).getTestGroups()[0].getName(), "sqlpp_queries")) {
+                selected.add(test);
+            }
+            // otherwise, randomly keep ~10% of the sqlpp_queries tests
+            else if (random.nextInt(10) == 0) {
+                selected.add(test);
+            }
+        }
+        return selected;
     }
 
     @Test
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageTest.java
index 5f7a037..78f4e55 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageTest.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.test.cloud_storage;
 
+import java.net.URI;
 import java.util.Collection;
 import java.util.List;
 
@@ -40,6 +41,12 @@
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 
+import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3ClientBuilder;
+import software.amazon.awssdk.services.s3.model.CreateBucketRequest;
+
 /**
  * Run tests in cloud deployment environment
  */
@@ -56,6 +63,11 @@
     private static final String DELTA_RESULT_PATH = "results_cloud";
     private static final String EXCLUDED_TESTS = "MP";
 
+    private static final String PLAYGROUND_CONTAINER = "playground";
+    private static final String MOCK_SERVER_REGION = "us-west-2";
+    private static final int MOCK_SERVER_PORT = 8001;
+    private static final String MOCK_SERVER_HOSTNAME = "http://127.0.0.1:" + MOCK_SERVER_PORT;
+
     public CloudStorageTest(TestCaseContext tcCtx) {
         this.tcCtx = tcCtx;
     }
@@ -68,6 +80,15 @@
         testExecutor.stripSubstring = "//DB:";
         LangExecutionUtil.setUp(CONFIG_FILE_NAME, testExecutor);
         System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, CONFIG_FILE_NAME);
+
+        // create the playground bucket and leave it empty, just for external collection-based tests
+        S3ClientBuilder builder = S3Client.builder();
+        URI endpoint = URI.create(MOCK_SERVER_HOSTNAME); // endpoint pointing to S3 mock server
+        builder.region(Region.of(MOCK_SERVER_REGION)).credentialsProvider(AnonymousCredentialsProvider.create())
+                .endpointOverride(endpoint);
+        S3Client client = builder.build();
+        client.createBucket(CreateBucketRequest.builder().bucket(PLAYGROUND_CONTAINER).build());
+        client.close();
     }
 
     @AfterClass
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageUnstableTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageUnstableTest.java
index a517ed4..82a264a 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageUnstableTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageUnstableTest.java
@@ -66,7 +66,7 @@
     @BeforeClass
     public static void setUp() throws Exception {
         System.setProperty(CloudRetryableRequestUtil.CLOUD_UNSTABLE_MODE, "true");
-        LocalCloudUtil.startS3CloudEnvironment(true);
+        LocalCloudUtil.startS3CloudEnvironment(true, true);
         TestExecutor testExecutor = new TestExecutor(DELTA_RESULT_PATH);
         testExecutor.executorId = "cloud";
         testExecutor.stripSubstring = "//DB:";
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestConstants.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestConstants.java
index 1a5cb66..0a96943 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestConstants.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestConstants.java
@@ -82,4 +82,12 @@
                 + BLOB_ENDPOINT_PLACEHOLDER + "\")";
         public static final String TEMPLATE_DEFAULT = TEMPLATE;
     }
+
+    public static class HDFS {
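+        // test-only Kerberos defaults for HDFS-based test cases; not real credentials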
+        public static final String HDFS_AUTHENTICATION_DEFAULT = "kerberos";
+        public static final String KERBEROS_PRINCIPAL_DEFAULT = "hdfsuser@EXAMPLE.COM";
+        public static final String KERBEROS_PASSWORD_DEFAULT = "hdfspassword";
+        public static final String KERBEROS_REALM_DEFAULT = "EXAMPLE.COM";
+        public static final String KERBEROS_KDC_DEFAULT = "localhost:8800";
+    }
 }
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
index 37527bd..3de0cd7 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
@@ -2424,6 +2424,8 @@
                     str = applyAzureSubstitution(str, placeholders);
                 } else if (placeholder.getValue().equalsIgnoreCase("GCS")) {
                     str = applyGCSSubstitution(str, placeholders);
+                } else if (placeholder.getValue().equalsIgnoreCase("HDFS")) {
+                    str = applyHDFSSubstitution(str, placeholders);
                 }
             } else {
                 // Any other place holders, just replace with the value
@@ -2534,6 +2536,11 @@
         return str;
     }
 
+    protected String applyHDFSSubstitution(String str, List<Placeholder> placeholders) {
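+        // no HDFS-specific placeholders yet; setHDFSTemplateDefault is a hook for deployment-specific substitutions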
+        str = setHDFSTemplateDefault(str);
+        return str;
+    }
+
     protected String setAzureTemplate(String str) {
         return str.replace("%template%", TEMPLATE);
     }
@@ -2546,6 +2553,10 @@
         return str;
     }
 
+    protected String setHDFSTemplateDefault(String str) {
+        return str;
+    }
+
     protected void fail(boolean runDiagnostics, TestCaseContext testCaseCtx, CompilationUnit cUnit,
             List<TestFileContext> testFileCtxs, ProcessBuilder pb, File testFile, Exception e) throws Exception {
         if (runDiagnostics) {
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/CheckpointInSecondaryIndexTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/CheckpointInSecondaryIndexTest.java
index 70226c8..4024ced 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/CheckpointInSecondaryIndexTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/CheckpointInSecondaryIndexTest.java
@@ -51,6 +51,7 @@
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.IAType;
@@ -174,7 +175,8 @@
                 null, DatasetType.INTERNAL, DATASET_ID, 0);
         String database = MetadataUtil.databaseFor(dvName);
         secondaryIndex = new Index(database, dvName, DATASET_NAME, INDEX_NAME, INDEX_TYPE, INDEX_FIELD_NAMES,
-                INDEX_FIELD_INDICATORS, INDEX_FIELD_TYPES, false, false, false, 0, OptionalBoolean.of(false));
+                INDEX_FIELD_INDICATORS, INDEX_FIELD_TYPES, false, false, false, 0, OptionalBoolean.of(false),
+                Creator.DEFAULT_CREATOR);
         taskCtx = null;
         primaryIndexDataflowHelper = null;
         secondaryIndexDataflowHelper = null;
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/ConcurrentInsertTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/ConcurrentInsertTest.java
index b458874..b85cae2 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/ConcurrentInsertTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/ConcurrentInsertTest.java
@@ -130,7 +130,7 @@
                 StorageTestUtils.DATASET.getDataverseName(), StorageTestUtils.DATASET.getDatasetName(), "TestIndex",
                 IndexType.BTREE, Arrays.asList(Arrays.asList(StorageTestUtils.RECORD_TYPE.getFieldNames()[1])),
                 Arrays.asList(Index.RECORD_INDICATOR), Arrays.asList(BuiltinType.AINT64), false, false, false, 0,
-                OptionalBoolean.of(false));
+                OptionalBoolean.of(false), StorageTestUtils.DATASET.getCreator());
 
         SecondaryIndexInfo secondaryIndexInfo =
                 nc.createSecondaryIndex(primaryIndexInfo, secondaryIndexEntity, StorageTestUtils.STORAGE_MANAGER, 0);
@@ -142,10 +142,10 @@
         secondaryIndex = (TestLsmBtree) secondaryDataflowHelper.getIndexInstance();
         secondaryDataflowHelper.close();
 
-        Index primaryKeyIndexEntity =
-                new Index(StorageTestUtils.DATASET.getDatabaseName(), StorageTestUtils.DATASET.getDataverseName(),
-                        StorageTestUtils.DATASET.getDatasetName(), "PrimaryKeyIndex", IndexType.BTREE, Arrays.asList(),
-                        Arrays.asList(), Arrays.asList(), false, false, false, 0, OptionalBoolean.empty());
+        Index primaryKeyIndexEntity = new Index(StorageTestUtils.DATASET.getDatabaseName(),
+                StorageTestUtils.DATASET.getDataverseName(), StorageTestUtils.DATASET.getDatasetName(),
+                "PrimaryKeyIndex", IndexType.BTREE, Arrays.asList(), Arrays.asList(), Arrays.asList(), false, false,
+                false, 0, OptionalBoolean.empty(), StorageTestUtils.DATASET.getCreator());
 
         SecondaryIndexInfo primaryKeyIndexInfo =
                 nc.createSecondaryIndex(primaryIndexInfo, primaryKeyIndexEntity, StorageTestUtils.STORAGE_MANAGER, 0);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/LSMFlushRecoveryTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/LSMFlushRecoveryTest.java
index b574c1b..7c1a534 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/LSMFlushRecoveryTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/LSMFlushRecoveryTest.java
@@ -189,7 +189,7 @@
         secondaryIndexEntity = new Index(dataset.getDatabaseName(), dataset.getDataverseName(),
                 dataset.getDatasetName(), SECONDARY_INDEX_NAME, SECONDARY_INDEX_TYPE, SECONDARY_INDEX_FIELD_NAMES,
                 SECONDARY_INDEX_FIELD_INDICATORS, SECONDARY_INDEX_FIELD_TYPES, false, false, false, 0,
-                OptionalBoolean.of(false));
+                OptionalBoolean.of(false), dataset.getCreator());
 
         primaryIndexInfos = new PrimaryIndexInfo[NUM_PARTITIONS];
         secondaryIndexInfo = new SecondaryIndexInfo[NUM_PARTITIONS];
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/MultiPartitionLSMIndexTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/MultiPartitionLSMIndexTest.java
index d921127..cc50f2a 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/MultiPartitionLSMIndexTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/MultiPartitionLSMIndexTest.java
@@ -161,9 +161,9 @@
                 NoMergePolicyFactory.NAME, null, new InternalDatasetDetails(null, PartitioningStrategy.HASH,
                         partitioningKeys, null, null, null, false, null, null),
                 null, DatasetType.INTERNAL, DATASET_ID, 0);
-        secondaryIndex =
-                new Index(dataset.getDatabaseName(), dvName, DATASET_NAME, INDEX_NAME, INDEX_TYPE, INDEX_FIELD_NAMES,
-                        INDEX_FIELD_INDICATORS, INDEX_FIELD_TYPES, false, false, false, 0, OptionalBoolean.of(false));
+        secondaryIndex = new Index(dataset.getDatabaseName(), dvName, DATASET_NAME, INDEX_NAME, INDEX_TYPE,
+                INDEX_FIELD_NAMES, INDEX_FIELD_INDICATORS, INDEX_FIELD_TYPES, false, false, false, 0,
+                OptionalBoolean.of(false), dataset.getCreator());
         taskCtxs = new IHyracksTaskContext[NUM_PARTITIONS];
         primaryIndexDataflowHelpers = new IIndexDataflowHelper[NUM_PARTITIONS];
         secondaryIndexDataflowHelpers = new IIndexDataflowHelper[NUM_PARTITIONS];
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/TestDataset.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/TestDataset.java
index 210440d..b268b19 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/TestDataset.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/TestDataset.java
@@ -28,6 +28,7 @@
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Index;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.metadata.utils.DatasetUtil;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.transaction.management.resource.DatasetLocalResourceFactory;
@@ -54,7 +55,7 @@
         super(MetadataUtil.databaseFor(dataverseName), dataverseName, datasetName,
                 MetadataUtil.databaseFor(recordTypeDataverseName), recordTypeDataverseName, recordTypeName,
                 nodeGroupName, compactionPolicy, compactionPolicyProperties, datasetDetails, hints, datasetType,
-                datasetId, pendingOp);
+                datasetId, pendingOp, Creator.DEFAULT_CREATOR);
     }
 
     @Override
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java
index 0b66676..0d3102a 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java
@@ -325,7 +325,7 @@
         Dataset dataset = new Dataset(source.getDatabaseName(), source.getDataverseName(), "ds_" + datasetPostfix,
                 source.getDatabaseName(), source.getDataverseName(), source.getDatasetType().name(),
                 source.getNodeGroupName(), NoMergePolicyFactory.NAME, null, source.getDatasetDetails(),
-                source.getHints(), DatasetConfig.DatasetType.INTERNAL, datasetPostfix, 0);
+                source.getHints(), DatasetConfig.DatasetType.INTERNAL, datasetPostfix, 0, source.getCreator());
         MetadataProvider metadataProvider = MetadataProvider.createWithDefaultNamespace(appCtx);
         final MetadataTransactionContext writeTxn = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(writeTxn);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/NonCloudSqlppExecutionTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/NonCloudSqlppExecutionTest.java
new file mode 100644
index 0000000..c403178
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/NonCloudSqlppExecutionTest.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.test.runtime;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.asterix.api.common.LocalCloudUtil;
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.test.common.TestExecutor;
+import org.apache.asterix.testframework.context.TestCaseContext;
+import org.apache.hyracks.control.nc.NodeControllerService;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+/**
+ * Runs the SQL++ runtime tests with storage parallelism.
+ * This variant runs only the tests that are not meant to run in the cloud, since their output can differ in a
+ * cloud deployment.
+ */
+@RunWith(Parameterized.class)
+public class NonCloudSqlppExecutionTest {
+    protected static final String TEST_CONFIG_FILE_NAME = "src/main/resources/cc.conf";
+
+    @BeforeClass
+    public static void setUp() throws Exception {
+        final TestExecutor testExecutor = new TestExecutor();
+        LangExecutionUtil.setUp(TEST_CONFIG_FILE_NAME, testExecutor);
+        setNcEndpoints(testExecutor);
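+        // bring up the S3 mock (including the playground bucket) because some of these tests read an external collection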
+        LocalCloudUtil.startS3CloudEnvironment(true, true);
+    }
+
+    @AfterClass
+    public static void tearDown() throws Exception {
+        LangExecutionUtil.tearDown();
+    }
+
+    @Parameters(name = "NonCloudSqlppExecutionTest {index}: {0}")
+    public static Collection<Object[]> tests() throws Exception {
+        return LangExecutionUtil.tests("only_sqlpp.xml", "non_cloud_sqlpp_queries.xml");
+    }
+
+    protected TestCaseContext tcCtx;
+
+    public NonCloudSqlppExecutionTest(TestCaseContext tcCtx) {
+        this.tcCtx = tcCtx;
+    }
+
+    @Test
+    public void test() throws Exception {
+        LangExecutionUtil.test(tcCtx);
+    }
+
+    private static void setNcEndpoints(TestExecutor testExecutor) {
+        final NodeControllerService[] ncs = ExecutionTestUtil.integrationUtil.ncs;
+        final Map<String, InetSocketAddress> ncEndPoints = new HashMap<>();
+        final String ip = InetAddress.getLoopbackAddress().getHostAddress();
+        for (NodeControllerService nc : ncs) {
+            final String nodeId = nc.getId();
+            final INcApplicationContext appCtx = (INcApplicationContext) nc.getApplicationContext();
+            int apiPort = appCtx.getExternalProperties().getNcApiPort();
+            ncEndPoints.put(nodeId, InetSocketAddress.createUnresolved(ip, apiPort));
+        }
+        testExecutor.setNcEndPoints(ncEndPoints);
+    }
+}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java
index 8d9a5f5..297f565 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java
@@ -41,6 +41,7 @@
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.metadata.utils.SplitsAndConstraintsUtil;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.BuiltinType;
@@ -114,7 +115,7 @@
                     NODE_GROUP_NAME, NoMergePolicyFactory.NAME, null,
                     new InternalDatasetDetails(null, InternalDatasetDetails.PartitioningStrategy.HASH, partitioningKeys,
                             null, null, null, false, null, null),
-                    null, DatasetConfig.DatasetType.INTERNAL, DATASET_ID, 0);
+                    null, DatasetConfig.DatasetType.INTERNAL, DATASET_ID, 0, Creator.DEFAULT_CREATOR);
             // create dataset
             TestNodeController.PrimaryIndexInfo indexInfo = nc.createPrimaryIndex(dataset, KEY_TYPES, RECORD_TYPE,
                     META_TYPE, null, storageManager, KEY_INDEXES, KEY_INDICATORS_LIST, 0);
diff --git a/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-azblob.conf b/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-azblob.conf
new file mode 100644
index 0000000..d11cb5e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-azblob.conf
@@ -0,0 +1,72 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements.  See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership.  The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License.  You may obtain a copy of the License at
+;
+;   http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied.  See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[nc/asterix_nc1]
+txn.log.dir=target/tmp/asterix_nc1/txnlog
+core.dump.dir=target/tmp/asterix_nc1/coredump
+iodevices=target/tmp/asterix_nc1/iodevice1
+iodevices=../asterix-server/target/tmp/asterix_nc1/iodevice2
+nc.api.port=19004
+#jvm.args=-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5006
+
+[nc/asterix_nc2]
+ncservice.port=9091
+txn.log.dir=target/tmp/asterix_nc2/txnlog
+core.dump.dir=target/tmp/asterix_nc2/coredump
+iodevices=target/tmp/asterix_nc2/iodevice1,../asterix-server/target/tmp/asterix_nc2/iodevice2
+nc.api.port=19005
+#jvm.args=-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5007
+
+[nc]
+credential.file=src/test/resources/security/passwd
+python.cmd.autolocate=true
+python.env=FOO=BAR=BAZ,BAR=BAZ
+address=127.0.0.1
+command=asterixnc
+app.class=org.apache.asterix.hyracks.bootstrap.NCApplication
+jvm.args=-Xmx4096m -Dnode.Resolver="org.apache.asterix.external.util.IdentitiyResolverFactory"
+storage.buffercache.size=128MB
+storage.memorycomponent.globalbudget=512MB
+
+[cc]
+address = 127.0.0.1
+app.class=org.apache.asterix.hyracks.bootstrap.CCApplication
+heartbeat.period=2000
+heartbeat.max.misses=25
+credential.file=src/test/resources/security/passwd
+
+[common]
+log.dir = logs/
+log.level = INFO
+compiler.framesize=32KB
+compiler.sortmemory=320KB
+compiler.groupmemory=160KB
+compiler.joinmemory=256KB
+compiler.textsearchmemory=160KB
+compiler.windowmemory=192KB
+compiler.internal.sanitycheck=true
+messaging.frame.size=4096
+messaging.frame.count=512
+cloud.deployment=true
+storage.buffercache.pagesize=32KB
+storage.partitioning=static
+cloud.storage.scheme=azblob
+cloud.storage.bucket=cloud-storage-container
+cloud.storage.region=us-east-2
+cloud.storage.endpoint=http://127.0.0.1:15055
+cloud.storage.anonymous.auth=true
+cloud.storage.cache.policy=lazy
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-gcs.conf b/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-gcs.conf
index 3c883a8..0046644 100644
--- a/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-gcs.conf
+++ b/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-gcs.conf
@@ -70,3 +70,6 @@
 cloud.storage.endpoint=http://127.0.0.1:4443
 cloud.storage.anonymous.auth=true
 cloud.storage.cache.policy=selective
+cloud.max.write.requests.per.second=1000
+cloud.max.read.requests.per.second=5000
+cloud.write.buffer.size=5
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/results/ch2/ch2_q2.plan b/asterixdb/asterix-app/src/test/resources/optimizerts/results/ch2/ch2_q2.plan
index 84f5c1d..6609508 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/results/ch2/ch2_q2.plan
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/results/ch2/ch2_q2.plan
@@ -3,40 +3,40 @@
     -- STREAM_LIMIT  |UNPARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
-          -- SORT_MERGE_EXCHANGE [$$260(ASC), $$261(ASC), $$226(ASC) ]  |PARTITIONED|
+          -- SORT_MERGE_EXCHANGE [$$280(ASC), $$281(ASC), $$247(ASC) ]  |PARTITIONED|
             -- STREAM_LIMIT  |PARTITIONED|
               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                -- STABLE_SORT [topK: 100] [$$260(ASC), $$261(ASC), $$226(ASC)]  |PARTITIONED|
+                -- STABLE_SORT [topK: 100] [$$280(ASC), $$281(ASC), $$247(ASC)]  |PARTITIONED|
                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                     -- STREAM_PROJECT  |PARTITIONED|
                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                        -- HYBRID_HASH_JOIN [$$238][$$239]  |PARTITIONED|
-                          -- HASH_PARTITION_EXCHANGE [$$238]  |PARTITIONED|
+                        -- HYBRID_HASH_JOIN [$$260][$$261]  |PARTITIONED|
+                          -- HASH_PARTITION_EXCHANGE [$$260]  |PARTITIONED|
                             -- STREAM_PROJECT  |PARTITIONED|
                               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                -- HYBRID_HASH_JOIN [$$245][$$246]  |PARTITIONED|
-                                  -- HASH_PARTITION_EXCHANGE [$$245]  |PARTITIONED|
+                                -- HYBRID_HASH_JOIN [$$267][$$268]  |PARTITIONED|
+                                  -- HASH_PARTITION_EXCHANGE [$$267]  |PARTITIONED|
                                     -- STREAM_PROJECT  |PARTITIONED|
                                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                        -- HYBRID_HASH_JOIN [$$251][$$228]  |PARTITIONED|
-                                          -- HASH_PARTITION_EXCHANGE [$$251]  |PARTITIONED|
+                                        -- HYBRID_HASH_JOIN [$$273][$$249]  |PARTITIONED|
+                                          -- HASH_PARTITION_EXCHANGE [$$273]  |PARTITIONED|
                                             -- STREAM_PROJECT  |PARTITIONED|
                                               -- ASSIGN  |PARTITIONED|
                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                    -- HYBRID_HASH_JOIN [$$226, $$240][$$227, $$249]  |PARTITIONED|
-                                                      -- HASH_PARTITION_EXCHANGE [$$226, $$240]  |PARTITIONED|
+                                                    -- HYBRID_HASH_JOIN [$$247, $$262][$$248, $$271]  |PARTITIONED|
+                                                      -- HASH_PARTITION_EXCHANGE [$$247, $$262]  |PARTITIONED|
                                                         -- STREAM_PROJECT  |PARTITIONED|
                                                           -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                            -- HYBRID_HASH_JOIN [$$s_i_id][$$226]  |PARTITIONED|
+                                                            -- HYBRID_HASH_JOIN [$$s_i_id][$$247]  |PARTITIONED|
                                                               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                -- SORT_GROUP_BY[$$271]  |PARTITIONED|
+                                                                -- SORT_GROUP_BY[$$291]  |PARTITIONED|
                                                                         {
                                                                           -- AGGREGATE  |LOCAL|
                                                                             -- NESTED_TUPLE_SOURCE  |LOCAL|
                                                                         }
-                                                                  -- HASH_PARTITION_EXCHANGE [$$271]  |PARTITIONED|
-                                                                    -- SORT_GROUP_BY[$$225]  |PARTITIONED|
+                                                                  -- HASH_PARTITION_EXCHANGE [$$291]  |PARTITIONED|
+                                                                    -- SORT_GROUP_BY[$$245]  |PARTITIONED|
                                                                             {
                                                                               -- AGGREGATE  |LOCAL|
                                                                                 -- NESTED_TUPLE_SOURCE  |LOCAL|
@@ -44,8 +44,8 @@
                                                                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                         -- STREAM_PROJECT  |PARTITIONED|
                                                                           -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                            -- HYBRID_HASH_JOIN [$$247][$$256]  |PARTITIONED|
-                                                                              -- HASH_PARTITION_EXCHANGE [$$247]  |PARTITIONED|
+                                                                            -- HYBRID_HASH_JOIN [$$269][$$277]  |PARTITIONED|
+                                                                              -- HASH_PARTITION_EXCHANGE [$$269]  |PARTITIONED|
                                                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                                                   -- ASSIGN  |PARTITIONED|
                                                                                     -- ASSIGN  |PARTITIONED|
@@ -57,11 +57,11 @@
                                                                                                 -- DATASOURCE_SCAN (test.stock)  |PARTITIONED|
                                                                                                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                                                     -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                                                              -- HASH_PARTITION_EXCHANGE [$$256]  |PARTITIONED|
+                                                                              -- HASH_PARTITION_EXCHANGE [$$277]  |PARTITIONED|
                                                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                                                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                    -- HYBRID_HASH_JOIN [$$243][$$255]  |PARTITIONED|
-                                                                                      -- HASH_PARTITION_EXCHANGE [$$243]  |PARTITIONED|
+                                                                                    -- HYBRID_HASH_JOIN [$$265][$$276]  |PARTITIONED|
+                                                                                      -- HASH_PARTITION_EXCHANGE [$$265]  |PARTITIONED|
                                                                                         -- STREAM_PROJECT  |PARTITIONED|
                                                                                           -- ASSIGN  |PARTITIONED|
                                                                                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
@@ -72,11 +72,11 @@
                                                                                                       -- DATASOURCE_SCAN (test.supplier)  |PARTITIONED|
                                                                                                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                                                           -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                                                                      -- HASH_PARTITION_EXCHANGE [$$255]  |PARTITIONED|
+                                                                                      -- HASH_PARTITION_EXCHANGE [$$276]  |PARTITIONED|
                                                                                         -- STREAM_PROJECT  |PARTITIONED|
                                                                                           -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                            -- HYBRID_HASH_JOIN [$$241][$$242]  |PARTITIONED|
-                                                                                              -- HASH_PARTITION_EXCHANGE [$$241]  |PARTITIONED|
+                                                                                            -- HYBRID_HASH_JOIN [$$263][$$264]  |PARTITIONED|
+                                                                                              -- HASH_PARTITION_EXCHANGE [$$263]  |PARTITIONED|
                                                                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                                                                   -- ASSIGN  |PARTITIONED|
                                                                                                     -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
@@ -89,16 +89,17 @@
                                                                                                                   -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
                                                                                               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                                                 -- REPLICATE  |PARTITIONED|
-                                                                                                  -- HASH_PARTITION_EXCHANGE [$$242]  |PARTITIONED|
+                                                                                                  -- HASH_PARTITION_EXCHANGE [$$264]  |PARTITIONED|
                                                                                                     -- STREAM_PROJECT  |PARTITIONED|
                                                                                                       -- STREAM_SELECT  |PARTITIONED|
-                                                                                                        -- ASSIGN  |PARTITIONED|
-                                                                                                          -- STREAM_PROJECT  |PARTITIONED|
-                                                                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                                              -- DATASOURCE_SCAN (test.region)  |PARTITIONED|
-                                                                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                                              -- HASH_PARTITION_EXCHANGE [$$226]  |PARTITIONED|
+                                                                                                        -- STREAM_PROJECT  |PARTITIONED|
+                                                                                                          -- ASSIGN  |PARTITIONED|
+                                                                                                            -- STREAM_PROJECT  |PARTITIONED|
+                                                                                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                                                -- DATASOURCE_SCAN (test.region)  |PARTITIONED|
+                                                                                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                              -- HASH_PARTITION_EXCHANGE [$$247]  |PARTITIONED|
                                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                                   -- STREAM_SELECT  |PARTITIONED|
                                                                     -- ASSIGN  |PARTITIONED|
@@ -107,7 +108,7 @@
                                                                           -- DATASOURCE_SCAN (test.item)  |PARTITIONED|
                                                                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                               -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                                      -- HASH_PARTITION_EXCHANGE [$$227, $$249]  |PARTITIONED|
+                                                      -- HASH_PARTITION_EXCHANGE [$$248, $$271]  |PARTITIONED|
                                                         -- STREAM_PROJECT  |PARTITIONED|
                                                           -- ASSIGN  |PARTITIONED|
                                                             -- STREAM_PROJECT  |PARTITIONED|
@@ -120,7 +121,7 @@
                                                                           -- DATASOURCE_SCAN (test.stock)  |PARTITIONED|
                                                                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                               -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                          -- HASH_PARTITION_EXCHANGE [$$228]  |PARTITIONED|
+                                          -- HASH_PARTITION_EXCHANGE [$$249]  |PARTITIONED|
                                             -- STREAM_PROJECT  |PARTITIONED|
                                               -- ASSIGN  |PARTITIONED|
                                                 -- STREAM_PROJECT  |PARTITIONED|
@@ -133,7 +134,7 @@
                                                               -- DATASOURCE_SCAN (test.supplier)  |PARTITIONED|
                                                                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                   -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                  -- HASH_PARTITION_EXCHANGE [$$246]  |PARTITIONED|
+                                  -- HASH_PARTITION_EXCHANGE [$$268]  |PARTITIONED|
                                     -- STREAM_PROJECT  |PARTITIONED|
                                       -- ASSIGN  |PARTITIONED|
                                         -- STREAM_PROJECT  |PARTITIONED|
@@ -151,12 +152,13 @@
                               -- ASSIGN  |PARTITIONED|
                                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                   -- REPLICATE  |PARTITIONED|
-                                    -- HASH_PARTITION_EXCHANGE [$$242]  |PARTITIONED|
+                                    -- HASH_PARTITION_EXCHANGE [$$264]  |PARTITIONED|
                                       -- STREAM_PROJECT  |PARTITIONED|
                                         -- STREAM_SELECT  |PARTITIONED|
-                                          -- ASSIGN  |PARTITIONED|
-                                            -- STREAM_PROJECT  |PARTITIONED|
-                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                -- DATASOURCE_SCAN (test.region)  |PARTITIONED|
-                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                          -- STREAM_PROJECT  |PARTITIONED|
+                                            -- ASSIGN  |PARTITIONED|
+                                              -- STREAM_PROJECT  |PARTITIONED|
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  -- DATASOURCE_SCAN (test.region)  |PARTITIONED|
+                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/results/ch2/ch2_q20.plan b/asterixdb/asterix-app/src/test/resources/optimizerts/results/ch2/ch2_q20.plan
index 3b04b56..dbbbea3 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/results/ch2/ch2_q20.plan
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/results/ch2/ch2_q20.plan
@@ -2,49 +2,49 @@
   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
-        -- SORT_MERGE_EXCHANGE [$$178(ASC) ]  |PARTITIONED|
-          -- STABLE_SORT [$$178(ASC)]  |PARTITIONED|
+        -- SORT_MERGE_EXCHANGE [$$191(ASC) ]  |PARTITIONED|
+          -- STABLE_SORT [$$191(ASC)]  |PARTITIONED|
             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
               -- STREAM_PROJECT  |PARTITIONED|
                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                  -- HYBRID_HASH_JOIN [$$172][$$173]  |PARTITIONED|
-                    -- HASH_PARTITION_EXCHANGE [$$172]  |PARTITIONED|
+                  -- HYBRID_HASH_JOIN [$$186][$$187]  |PARTITIONED|
+                    -- HASH_PARTITION_EXCHANGE [$$186]  |PARTITIONED|
                       -- STREAM_PROJECT  |PARTITIONED|
                         -- STREAM_SELECT  |PARTITIONED|
                           -- STREAM_PROJECT  |PARTITIONED|
                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                              -- PRE_CLUSTERED_GROUP_BY[$$164]  |PARTITIONED|
+                              -- PRE_CLUSTERED_GROUP_BY[$$177]  |PARTITIONED|
                                       {
                                         -- AGGREGATE  |LOCAL|
                                           -- STREAM_SELECT  |LOCAL|
                                             -- NESTED_TUPLE_SOURCE  |LOCAL|
                                       }
                                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                  -- STABLE_SORT [$$164(ASC)]  |PARTITIONED|
-                                    -- HASH_PARTITION_EXCHANGE [$$164]  |PARTITIONED|
+                                  -- STABLE_SORT [$$177(ASC)]  |PARTITIONED|
+                                    -- HASH_PARTITION_EXCHANGE [$$177]  |PARTITIONED|
                                       -- STREAM_PROJECT  |PARTITIONED|
                                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                          -- HYBRID_HASH_JOIN [$$171][$$143]  |PARTITIONED|
-                                            -- HASH_PARTITION_EXCHANGE [$$171]  |PARTITIONED|
+                                          -- HYBRID_HASH_JOIN [$$185][$$156]  |PARTITIONED|
+                                            -- HASH_PARTITION_EXCHANGE [$$185]  |PARTITIONED|
                                               -- STREAM_PROJECT  |PARTITIONED|
                                                 -- ASSIGN  |PARTITIONED|
                                                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                     -- DATASOURCE_SCAN (test.supplier)  |PARTITIONED|
                                                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                         -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                            -- HASH_PARTITION_EXCHANGE [$$143]  |PARTITIONED|
+                                            -- HASH_PARTITION_EXCHANGE [$$156]  |PARTITIONED|
                                               -- STREAM_PROJECT  |PARTITIONED|
                                                 -- ASSIGN  |PARTITIONED|
                                                   -- STREAM_PROJECT  |PARTITIONED|
                                                     -- STREAM_SELECT  |PARTITIONED|
                                                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                        -- SORT_GROUP_BY[$$187, $$188, $$189]  |PARTITIONED|
+                                                        -- SORT_GROUP_BY[$$200, $$201, $$202]  |PARTITIONED|
                                                                 {
                                                                   -- AGGREGATE  |LOCAL|
                                                                     -- NESTED_TUPLE_SOURCE  |LOCAL|
                                                                 }
-                                                          -- HASH_PARTITION_EXCHANGE [$$187, $$188, $$189]  |PARTITIONED|
-                                                            -- SORT_GROUP_BY[$$170, $$160, $$161]  |PARTITIONED|
+                                                          -- HASH_PARTITION_EXCHANGE [$$200, $$201, $$202]  |PARTITIONED|
+                                                            -- SORT_GROUP_BY[$$184, $$173, $$174]  |PARTITIONED|
                                                                     {
                                                                       -- AGGREGATE  |LOCAL|
                                                                         -- NESTED_TUPLE_SOURCE  |LOCAL|
@@ -52,42 +52,43 @@
                                                               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                    -- HYBRID_HASH_JOIN [$$170][$$174]  |PARTITIONED|
-                                                                      -- HASH_PARTITION_EXCHANGE [$$170]  |PARTITIONED|
+                                                                    -- HYBRID_HASH_JOIN [$$184][$$188]  |PARTITIONED|
+                                                                      -- HASH_PARTITION_EXCHANGE [$$184]  |PARTITIONED|
                                                                         -- STREAM_PROJECT  |PARTITIONED|
                                                                           -- STREAM_SELECT  |PARTITIONED|
                                                                             -- STREAM_PROJECT  |PARTITIONED|
                                                                               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                -- PRE_CLUSTERED_GROUP_BY[$$166]  |PARTITIONED|
+                                                                                -- PRE_CLUSTERED_GROUP_BY[$$179]  |PARTITIONED|
                                                                                         {
                                                                                           -- AGGREGATE  |LOCAL|
                                                                                             -- STREAM_SELECT  |LOCAL|
                                                                                               -- NESTED_TUPLE_SOURCE  |LOCAL|
                                                                                         }
                                                                                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                    -- STABLE_SORT [$$166(ASC)]  |PARTITIONED|
-                                                                                      -- HASH_PARTITION_EXCHANGE [$$166]  |PARTITIONED|
+                                                                                    -- STABLE_SORT [$$179(ASC)]  |PARTITIONED|
+                                                                                      -- HASH_PARTITION_EXCHANGE [$$179]  |PARTITIONED|
                                                                                         -- STREAM_PROJECT  |PARTITIONED|
                                                                                           -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                            -- HYBRID_HASH_JOIN [$$170][$$117]  |PARTITIONED|
-                                                                                              -- HASH_PARTITION_EXCHANGE [$$170]  |PARTITIONED|
+                                                                                            -- HYBRID_HASH_JOIN [$$184][$$130]  |PARTITIONED|
+                                                                                              -- HASH_PARTITION_EXCHANGE [$$184]  |PARTITIONED|
                                                                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                                                                   -- ASSIGN  |PARTITIONED|
                                                                                                     -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                                                       -- DATASOURCE_SCAN (test.stock)  |PARTITIONED|
                                                                                                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                                                           -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                                                                              -- HASH_PARTITION_EXCHANGE [$$117]  |PARTITIONED|
+                                                                                              -- HASH_PARTITION_EXCHANGE [$$130]  |PARTITIONED|
                                                                                                 -- ASSIGN  |PARTITIONED|
                                                                                                   -- STREAM_PROJECT  |PARTITIONED|
                                                                                                     -- STREAM_SELECT  |PARTITIONED|
-                                                                                                      -- ASSIGN  |PARTITIONED|
-                                                                                                        -- STREAM_PROJECT  |PARTITIONED|
-                                                                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                                            -- DATASOURCE_SCAN (test.item)  |PARTITIONED|
-                                                                                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                                                      -- HASH_PARTITION_EXCHANGE [$$174]  |PARTITIONED|
+                                                                                                      -- STREAM_PROJECT  |PARTITIONED|
+                                                                                                        -- ASSIGN  |PARTITIONED|
+                                                                                                          -- STREAM_PROJECT  |PARTITIONED|
+                                                                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                                              -- DATASOURCE_SCAN (test.item)  |PARTITIONED|
+                                                                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                                      -- HASH_PARTITION_EXCHANGE [$$188]  |PARTITIONED|
                                                                         -- STREAM_PROJECT  |PARTITIONED|
                                                                           -- STREAM_SELECT  |PARTITIONED|
                                                                             -- STREAM_PROJECT  |PARTITIONED|
@@ -101,7 +102,7 @@
                                                                                             -- DATASOURCE_SCAN (test.orders)  |PARTITIONED|
                                                                                               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                                                 -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                    -- HASH_PARTITION_EXCHANGE [$$173]  |PARTITIONED|
+                    -- HASH_PARTITION_EXCHANGE [$$187]  |PARTITIONED|
                       -- STREAM_PROJECT  |PARTITIONED|
                         -- STREAM_SELECT  |PARTITIONED|
                           -- ASSIGN  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/results/ch2/ch2_q3.plan b/asterixdb/asterix-app/src/test/resources/optimizerts/results/ch2/ch2_q3.plan
index a2ffaea..1d83160 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/results/ch2/ch2_q3.plan
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/results/ch2/ch2_q3.plan
@@ -2,16 +2,16 @@
   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
-        -- SORT_MERGE_EXCHANGE [$$204(DESC), $$o_entry_d(ASC) ]  |PARTITIONED|
-          -- STABLE_SORT [$$204(DESC), $$o_entry_d(ASC)]  |PARTITIONED|
+        -- SORT_MERGE_EXCHANGE [$$220(DESC), $$o_entry_d(ASC) ]  |PARTITIONED|
+          -- STABLE_SORT [$$220(DESC), $$o_entry_d(ASC)]  |PARTITIONED|
             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-              -- SORT_GROUP_BY[$$211, $$212, $$213, $$214]  |PARTITIONED|
+              -- SORT_GROUP_BY[$$226, $$227, $$228, $$229]  |PARTITIONED|
                       {
                         -- AGGREGATE  |LOCAL|
                           -- NESTED_TUPLE_SOURCE  |LOCAL|
                       }
-                -- HASH_PARTITION_EXCHANGE [$$211, $$212, $$213, $$214]  |PARTITIONED|
-                  -- SORT_GROUP_BY[$$206, $$188, $$189, $$190]  |PARTITIONED|
+                -- HASH_PARTITION_EXCHANGE [$$226, $$227, $$228, $$229]  |PARTITIONED|
+                  -- SORT_GROUP_BY[$$221, $$204, $$205, $$206]  |PARTITIONED|
                           {
                             -- AGGREGATE  |LOCAL|
                               -- NESTED_TUPLE_SOURCE  |LOCAL|
@@ -19,16 +19,16 @@
                     -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                       -- STREAM_PROJECT  |PARTITIONED|
                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                          -- HYBRID_HASH_JOIN [$$188, $$189, $$206][$$201, $$202, $$203]  |PARTITIONED|
-                            -- HASH_PARTITION_EXCHANGE [$$188, $$189, $$206]  |PARTITIONED|
+                          -- HYBRID_HASH_JOIN [$$204, $$205, $$221][$$217, $$218, $$219]  |PARTITIONED|
+                            -- HASH_PARTITION_EXCHANGE [$$204, $$205, $$221]  |PARTITIONED|
                               -- STREAM_PROJECT  |PARTITIONED|
                                 -- ASSIGN  |PARTITIONED|
                                   -- STREAM_PROJECT  |PARTITIONED|
                                     -- UNNEST  |PARTITIONED|
                                       -- STREAM_PROJECT  |PARTITIONED|
                                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                          -- HYBRID_HASH_JOIN [$$198, $$188, $$189][$$197, $$199, $$200]  |PARTITIONED|
-                                            -- HASH_PARTITION_EXCHANGE [$$198, $$188, $$189]  |PARTITIONED|
+                                          -- HYBRID_HASH_JOIN [$$214, $$204, $$205][$$213, $$215, $$216]  |PARTITIONED|
+                                            -- HASH_PARTITION_EXCHANGE [$$214, $$204, $$205]  |PARTITIONED|
                                               -- STREAM_SELECT  |PARTITIONED|
                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                   -- ASSIGN  |PARTITIONED|
@@ -37,16 +37,17 @@
                                                         -- DATASOURCE_SCAN (test.orders)  |PARTITIONED|
                                                           -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                             -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                            -- HASH_PARTITION_EXCHANGE [$$197, $$199, $$200]  |PARTITIONED|
+                                            -- HASH_PARTITION_EXCHANGE [$$213, $$215, $$216]  |PARTITIONED|
                                               -- STREAM_PROJECT  |PARTITIONED|
                                                 -- STREAM_SELECT  |PARTITIONED|
-                                                  -- ASSIGN  |PARTITIONED|
-                                                    -- STREAM_PROJECT  |PARTITIONED|
-                                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                        -- DATASOURCE_SCAN (test.customer)  |PARTITIONED|
-                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                            -- HASH_PARTITION_EXCHANGE [$$201, $$202, $$203]  |PARTITIONED|
+                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                    -- ASSIGN  |PARTITIONED|
+                                                      -- STREAM_PROJECT  |PARTITIONED|
+                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                          -- DATASOURCE_SCAN (test.customer)  |PARTITIONED|
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                            -- HASH_PARTITION_EXCHANGE [$$217, $$218, $$219]  |PARTITIONED|
                               -- STREAM_PROJECT  |PARTITIONED|
                                 -- ASSIGN  |PARTITIONED|
                                   -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/results_cbo/ch2/ch2_q2.plan b/asterixdb/asterix-app/src/test/resources/optimizerts/results_cbo/ch2/ch2_q2.plan
index c1d568c..c719b4b 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/results_cbo/ch2/ch2_q2.plan
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/results_cbo/ch2/ch2_q2.plan
@@ -3,40 +3,40 @@
     -- STREAM_LIMIT  |UNPARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
-          -- SORT_MERGE_EXCHANGE [$$274(ASC), $$275(ASC), $$240(ASC) ]  |PARTITIONED|
+          -- SORT_MERGE_EXCHANGE [$$280(ASC), $$281(ASC), $$247(ASC) ]  |PARTITIONED|
             -- STREAM_LIMIT  |PARTITIONED|
               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                -- STABLE_SORT [topK: 100] [$$274(ASC), $$275(ASC), $$240(ASC)]  |PARTITIONED|
+                -- STABLE_SORT [topK: 100] [$$280(ASC), $$281(ASC), $$247(ASC)]  |PARTITIONED|
                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                     -- STREAM_PROJECT  |PARTITIONED|
                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                        -- HYBRID_HASH_JOIN [$$252][$$253]  |PARTITIONED|
-                          -- HASH_PARTITION_EXCHANGE [$$252]  |PARTITIONED|
+                        -- HYBRID_HASH_JOIN [$$260][$$261]  |PARTITIONED|
+                          -- HASH_PARTITION_EXCHANGE [$$260]  |PARTITIONED|
                             -- STREAM_PROJECT  |PARTITIONED|
                               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                -- HYBRID_HASH_JOIN [$$259][$$260]  |PARTITIONED|
-                                  -- HASH_PARTITION_EXCHANGE [$$259]  |PARTITIONED|
+                                -- HYBRID_HASH_JOIN [$$267][$$268]  |PARTITIONED|
+                                  -- HASH_PARTITION_EXCHANGE [$$267]  |PARTITIONED|
                                     -- STREAM_PROJECT  |PARTITIONED|
                                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                        -- HYBRID_HASH_JOIN [$$265][$$242]  |PARTITIONED|
-                                          -- HASH_PARTITION_EXCHANGE [$$265]  |PARTITIONED|
+                                        -- HYBRID_HASH_JOIN [$$273][$$249]  |PARTITIONED|
+                                          -- HASH_PARTITION_EXCHANGE [$$273]  |PARTITIONED|
                                             -- STREAM_PROJECT  |PARTITIONED|
                                               -- ASSIGN  |PARTITIONED|
                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                    -- HYBRID_HASH_JOIN [$$240, $$254][$$241, $$263]  |PARTITIONED|
-                                                      -- HASH_PARTITION_EXCHANGE [$$240, $$254]  |PARTITIONED|
+                                                    -- HYBRID_HASH_JOIN [$$247, $$262][$$248, $$271]  |PARTITIONED|
+                                                      -- HASH_PARTITION_EXCHANGE [$$247, $$262]  |PARTITIONED|
                                                         -- STREAM_PROJECT  |PARTITIONED|
                                                           -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                            -- HYBRID_HASH_JOIN [$$s_i_id][$$240]  |PARTITIONED|
+                                                            -- HYBRID_HASH_JOIN [$$s_i_id][$$247]  |PARTITIONED|
                                                               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                -- SORT_GROUP_BY[$$285]  |PARTITIONED|
+                                                                -- SORT_GROUP_BY[$$291]  |PARTITIONED|
                                                                         {
                                                                           -- AGGREGATE  |LOCAL|
                                                                             -- NESTED_TUPLE_SOURCE  |LOCAL|
                                                                         }
-                                                                  -- HASH_PARTITION_EXCHANGE [$$285]  |PARTITIONED|
-                                                                    -- SORT_GROUP_BY[$$239]  |PARTITIONED|
+                                                                  -- HASH_PARTITION_EXCHANGE [$$291]  |PARTITIONED|
+                                                                    -- SORT_GROUP_BY[$$245]  |PARTITIONED|
                                                                             {
                                                                               -- AGGREGATE  |LOCAL|
                                                                                 -- NESTED_TUPLE_SOURCE  |LOCAL|
@@ -44,16 +44,16 @@
                                                                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                         -- STREAM_PROJECT  |PARTITIONED|
                                                                           -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                            -- HYBRID_HASH_JOIN [$$255][$$256]  |PARTITIONED|
+                                                                            -- HYBRID_HASH_JOIN [$$263][$$264]  |PARTITIONED|
                                                                               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                                                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                    -- HYBRID_HASH_JOIN [$$257][$$269]  |PARTITIONED|
+                                                                                    -- HYBRID_HASH_JOIN [$$265][$$276]  |PARTITIONED|
                                                                                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                                         -- STREAM_PROJECT  |PARTITIONED|
                                                                                           -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                            -- HYBRID_HASH_JOIN [$$261][$$270]  |PARTITIONED|
-                                                                                              -- HASH_PARTITION_EXCHANGE [$$261]  |PARTITIONED|
+                                                                                            -- HYBRID_HASH_JOIN [$$269][$$277]  |PARTITIONED|
+                                                                                              -- HASH_PARTITION_EXCHANGE [$$269]  |PARTITIONED|
                                                                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                                                                   -- ASSIGN  |PARTITIONED|
                                                                                                     -- ASSIGN  |PARTITIONED|
@@ -65,7 +65,7 @@
                                                                                                                 -- DATASOURCE_SCAN (test.stock)  |PARTITIONED|
                                                                                                                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                                                                     -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                                                                              -- HASH_PARTITION_EXCHANGE [$$270]  |PARTITIONED|
+                                                                                              -- HASH_PARTITION_EXCHANGE [$$277]  |PARTITIONED|
                                                                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                                                                   -- ASSIGN  |PARTITIONED|
                                                                                                     -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
@@ -92,13 +92,14 @@
                                                                                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                                     -- STREAM_PROJECT  |PARTITIONED|
                                                                                       -- STREAM_SELECT  |PARTITIONED|
-                                                                                        -- ASSIGN  |PARTITIONED|
-                                                                                          -- STREAM_PROJECT  |PARTITIONED|
-                                                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                              -- DATASOURCE_SCAN (test.region)  |PARTITIONED|
-                                                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                                              -- HASH_PARTITION_EXCHANGE [$$240]  |PARTITIONED|
+                                                                                        -- STREAM_PROJECT  |PARTITIONED|
+                                                                                          -- ASSIGN  |PARTITIONED|
+                                                                                            -- STREAM_PROJECT  |PARTITIONED|
+                                                                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                                -- DATASOURCE_SCAN (test.region)  |PARTITIONED|
+                                                                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                              -- HASH_PARTITION_EXCHANGE [$$247]  |PARTITIONED|
                                                                 -- STREAM_PROJECT  |PARTITIONED|
                                                                   -- STREAM_SELECT  |PARTITIONED|
                                                                     -- ASSIGN  |PARTITIONED|
@@ -107,7 +108,7 @@
                                                                           -- DATASOURCE_SCAN (test.item)  |PARTITIONED|
                                                                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                               -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                                      -- HASH_PARTITION_EXCHANGE [$$241, $$263]  |PARTITIONED|
+                                                      -- HASH_PARTITION_EXCHANGE [$$248, $$271]  |PARTITIONED|
                                                         -- STREAM_PROJECT  |PARTITIONED|
                                                           -- ASSIGN  |PARTITIONED|
                                                             -- STREAM_PROJECT  |PARTITIONED|
@@ -120,7 +121,7 @@
                                                                           -- DATASOURCE_SCAN (test.stock)  |PARTITIONED|
                                                                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                               -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                          -- HASH_PARTITION_EXCHANGE [$$242]  |PARTITIONED|
+                                          -- HASH_PARTITION_EXCHANGE [$$249]  |PARTITIONED|
                                             -- STREAM_PROJECT  |PARTITIONED|
                                               -- ASSIGN  |PARTITIONED|
                                                 -- STREAM_PROJECT  |PARTITIONED|
@@ -133,7 +134,7 @@
                                                               -- DATASOURCE_SCAN (test.supplier)  |PARTITIONED|
                                                                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                                   -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                                  -- HASH_PARTITION_EXCHANGE [$$260]  |PARTITIONED|
+                                  -- HASH_PARTITION_EXCHANGE [$$268]  |PARTITIONED|
                                     -- STREAM_PROJECT  |PARTITIONED|
                                       -- ASSIGN  |PARTITIONED|
                                         -- STREAM_PROJECT  |PARTITIONED|
@@ -146,7 +147,7 @@
                                                       -- DATASOURCE_SCAN (test.nation)  |PARTITIONED|
                                                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                                           -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-                          -- HASH_PARTITION_EXCHANGE [$$253]  |PARTITIONED|
+                          -- HASH_PARTITION_EXCHANGE [$$261]  |PARTITIONED|
                             -- STREAM_PROJECT  |PARTITIONED|
                               -- ASSIGN  |PARTITIONED|
                                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
@@ -154,9 +155,10 @@
                                     -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                       -- STREAM_PROJECT  |PARTITIONED|
                                         -- STREAM_SELECT  |PARTITIONED|
-                                          -- ASSIGN  |PARTITIONED|
-                                            -- STREAM_PROJECT  |PARTITIONED|
-                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                -- DATASOURCE_SCAN (test.region)  |PARTITIONED|
-                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                          -- STREAM_PROJECT  |PARTITIONED|
+                                            -- ASSIGN  |PARTITIONED|
+                                              -- STREAM_PROJECT  |PARTITIONED|
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  -- DATASOURCE_SCAN (test.region)  |PARTITIONED|
+                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
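Note: two changes recur across every ch2_q2 / ch2_q20 / ch2_q3 plan hunk above. First, optimizer-generated variables are renumbered wholesale (e.g. $$240 becomes $$247, $$285 becomes $$291). Second, wherever a STREAM_SELECT sat directly on an ASSIGN feeding a test.region, test.item, or test.customer scan, a STREAM_PROJECT is now interposed. Read straight from the before/after lines, the operator shape changes as:

    before:  STREAM_SELECT -> ASSIGN -> STREAM_PROJECT -> DATASOURCE_SCAN
    after:   STREAM_SELECT -> STREAM_PROJECT -> ASSIGN -> STREAM_PROJECT -> DATASOURCE_SCAN

The extra projection prunes unused columns before the ASSIGN rather than after; that reading is an inference from the plans themselves, since the diff carries no rationale.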
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/non_cloud_sqlpp_queries.xml b/asterixdb/asterix-app/src/test/resources/runtimets/non_cloud_sqlpp_queries.xml
new file mode 100644
index 0000000..f379c0b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/non_cloud_sqlpp_queries.xml
@@ -0,0 +1,93 @@
+<!--
+ ! Licensed to the Apache Software Foundation (ASF) under one
+ ! or more contributor license agreements.  See the NOTICE file
+ ! distributed with this work for additional information
+ ! regarding copyright ownership.  The ASF licenses this file
+ ! to you under the Apache License, Version 2.0 (the
+ ! "License"); you may not use this file except in compliance
+ ! with the License.  You may obtain a copy of the License at
+ !
+ !   http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing,
+ ! software distributed under the License is distributed on an
+ ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ! KIND, either express or implied.  See the License for the
+ ! specific language governing permissions and limitations
+ ! under the License.
+ !-->
+<test-suite xmlns="urn:xml.testframework.asterix.apache.org" ResultOffsetPath="results" QueryOffsetPath="queries_sqlpp"
+            QueryFileExtension=".sqlpp">
+  <test-group name="sqlpp_queries">
+    <test-group name="storage-size">
+      <test-case FilePath="storage-size/datasource-function">
+        <compilation-unit name="collection-exists">
+          <output-dir compare="Clean-JSON">collection-exists</output-dir>
+          <source-location>false</source-location>
+        </compilation-unit>
+      </test-case>
+      <test-case FilePath="storage-size/datasource-function">
+        <compilation-unit name="non-constant-argument">
+          <output-dir compare="Clean-JSON">non-constant-argument</output-dir>
+          <source-location>false</source-location>
+          <expected-error>Compilation error: Function asterix.storage-size expects constant arguments while arg[0] is of type VARIABLE</expected-error>
+        </compilation-unit>
+      </test-case>
+      <test-case FilePath="storage-size/datasource-function">
+        <compilation-unit name="collection-does-not-exist">
+          <output-dir compare="Clean-JSON">collection-does-not-exist</output-dir>
+          <source-location>false</source-location>
+          <expected-error>Cannot find dataset with name doesNotExistCollection in dataverse testScope</expected-error>
+        </compilation-unit>
+      </test-case>
+      <test-case FilePath="storage-size/datasource-function">
+        <compilation-unit name="non-string-argument">
+          <output-dir compare="Clean-JSON">non-string-argument</output-dir>
+          <source-location>false</source-location>
+          <expected-error>Type mismatch: function storage-size expects its 1st input parameter to be of type string, but the actual input type is bigint</expected-error>
+        </compilation-unit>
+      </test-case>
+      <test-case FilePath="storage-size/datasource-function">
+        <compilation-unit name="view-not-collection">
+          <output-dir compare="Clean-JSON">view-not-collection</output-dir>
+          <source-location>false</source-location>
+          <expected-error>Cannot find dataset with name testCollection_vw in dataverse testScope</expected-error>
+        </compilation-unit>
+      </test-case>
+      <test-case FilePath="storage-size/datasource-function">
+        <compilation-unit name="external-collection">
+          <output-dir compare="Clean-JSON">external-collection</output-dir>
+          <source-location>false</source-location>
+          <expected-error>Retrieving storage size is not applicable to type: EXTERNAL.</expected-error>
+        </compilation-unit>
+      </test-case>
+      <test-case FilePath="storage-size/datasource-function">
+        <compilation-unit name="index-exists">
+          <output-dir compare="Clean-JSON">index-exists</output-dir>
+          <source-location>false</source-location>
+        </compilation-unit>
+      </test-case>
+      <test-case FilePath="storage-size/datasource-function">
+        <compilation-unit name="index-does-not-exist">
+          <output-dir compare="Clean-JSON">index-does-not-exist</output-dir>
+          <source-location>false</source-location>
+          <expected-error>Cannot find index with name testCollection_idx_fake</expected-error>
+        </compilation-unit>
+      </test-case>
+      <test-case FilePath="storage-size/datasource-function">
+        <compilation-unit name="invalid-arguments-count">
+          <output-dir compare="Clean-JSON">invalid-arguments-count</output-dir>
+          <source-location>false</source-location>
+          <expected-error>Invalid number of arguments for function storage-size</expected-error>
+          <expected-error>Invalid number of arguments for function storage-size</expected-error>
+        </compilation-unit>
+      </test-case>
+      <test-case FilePath="storage-size/datasource-function">
+        <compilation-unit name="sanity">
+          <output-dir compare="Clean-JSON">sanity</output-dir>
+          <source-location>false</source-location>
+        </compilation-unit>
+      </test-case>
+    </test-group>
+  </test-group>
+</test-suite>
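Note: the expected-error strings in the suite above pin down the storage-size contract: arguments must be constants (arg[0] of type VARIABLE is rejected), the 1st parameter must be a string, the argument count is fixed, and views and EXTERNAL datasets are rejected, with an optional trailing argument naming an index. A minimal invocation sketch in SQL++ follows, assuming the argument order (dataverse, collection[, index]) — that order is inferred from the test names, not stated in this suite:

    USE testScope;
    -- dataset footprint; testScope/testCollection are the names the expected errors reference
    SELECT VALUE storage-size("testScope", "testCollection");
    -- index footprint, matching the index-exists / index-does-not-exist cases
    SELECT VALUE storage-size("testScope", "testCollection", "testCollection_idx");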
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.1.ddl.sqlpp
new file mode 100644
index 0000000..ad97753
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.1.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.2.update.sqlpp
new file mode 100644
index 0000000..bd244d0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.2.update.sqlpp
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.3.query.sqlpp
new file mode 100644
index 0000000..f19b678
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.3.query.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+
+select element test.array_countn((
+    select element x
+    from  [1,2,3,"four"] as x
+));
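Note: countn follows the COUNTN semantics of counting only the numeric items in its input, skipping nulls, missings, and non-numeric values. Under that reading — a sketch of the expectation, not an output file from this diff — the query above yields 3 for [1, 2, 3, "four"], since the string is ignored; the countn_02 rows below (e.g. {"x": [1, "two"]}) exercise the same rule.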
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_02/countn_02.1.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_02/countn_02.1.query.sqlpp
new file mode 100644
index 0000000..261dfa0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_02/countn_02.1.query.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+select value array_countn(x) from [
+{"x":[1,2,3]},
+{"x":[1,2,null]},
+{"x":[1,"two"]},
+{"x":[1,"two", null]},
+{"x":[null]},
+{"x":[]},
+{"a":[1,2,3]}
+] as list
\ No newline at end of file
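Each row above feeds one array to ARRAY_COUNTN, so its null handling can be read off per row: nulls and non-numeric items are skipped rather than counted, and an empty or all-null array counts to 0; what the final row with no x field yields is pinned by the companion result file. Two hedged spot checks (expected values inferred from the test design):

    select value array_countn([1, 2, null]);  -- expected: 2 (the null is skipped)
    select value array_countn([null]);        -- expected: 0 (nothing numeric to count)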
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_distinct/countn_distinct.1.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_distinct/countn_distinct.1.query.sqlpp
new file mode 100644
index 0000000..526c423
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_distinct/countn_distinct.1.query.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ {
+   't1': array_countn(distinct [10,20,30,10,20,30,10]),
+   't2': array_countn(distinct [null,missing,null,missing]),
+   't3': array_countn(distinct [40,null,missing,50,40,null,missing,50,40]),
+   't4': array_countn(distinct [10,20,30,10,20,"thirty",20]),
+   't5': array_countn(distinct [])
+ }
\ No newline at end of file
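With DISTINCT, duplicates are collapsed before counting and the numeric-only rule still applies afterwards: t4 reduces [10,20,30,10,20,"thirty",20] to the distinct values 10, 20, 30 and "thirty", of which only the three numbers are counted. A standalone illustration (expected value is an inference, not taken from the result file):

    select value array_countn(distinct [10, 20, 30, 10, 20, "thirty", 20]);  -- expected: 3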
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.1.ddl.sqlpp
new file mode 100644
index 0000000..0dcc111
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.1.ddl.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that array_countn aggregation correctly returns 0 for an empty stream,
+ *                  without an aggregate combiner.
+ * Success        : Yes
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.2.update.sqlpp
new file mode 100644
index 0000000..de14e51
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.2.update.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that array_countn aggregation correctly returns 0 for an empty stream,
+ *                  without an aggregate combiner.
+ * Success        : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.3.query.sqlpp
new file mode 100644
index 0000000..3eec594
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.3.query.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that array_countn aggregation correctly returns 0 for an empty stream,
+ *                  without an aggregate combiner.
+ * Success        : Yes
+ */
+
+select element array_countn((
+    select element x
+    from  [1,2,3] as x
+    where (x > 10)
+));
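Unlike AVG-style aggregates, the COUNT family yields 0 rather than null on an empty stream, which is exactly what this test pins down: the WHERE clause filters out every element before the aggregate runs. A minimal equivalent (expected value taken from the test's own description):

    -- no element of [1,2,3] exceeds 10, so the aggregated subquery is empty
    select value array_countn((select value x from [1, 2, 3] as x where x > 10));  -- expected: 0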
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.1.ddl.sqlpp
new file mode 100644
index 0000000..29f79ca
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.1.ddl.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that array_countn aggregation correctly returns 0 for an empty stream,
+ *                  with an aggregate combiner.
+ * Success        : Yes
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+ closed {
+  id : bigint,
+  val : double
+};
+
+create  dataset Test(TestType) primary key id;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.2.update.sqlpp
new file mode 100644
index 0000000..379d8e8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.2.update.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that array_countn aggregation correctly returns 0 for an empty stream,
+ *                  with an aggregate combiner.
+ * Success        : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.3.query.sqlpp
new file mode 100644
index 0000000..c9ae713
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.3.query.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that array_countn aggregation correctly returns 0 for an empty stream,
+ *                  with an aggregate combiner.
+ * Success        : Yes
+ */
+
+use test;
+
+
+select element test.array_countn((
+    select element x.val
+    from  Test as x
+));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.1.ddl.sqlpp
new file mode 100644
index 0000000..6788afc
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.1.ddl.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.NumericType as
+{
+  id : bigint,
+  int8Field : tinyint?,
+  int16Field : smallint?,
+  int32Field : integer?,
+  int64Field : bigint?,
+  floatField : float?,
+  doubleField : double?
+};
+
+create external  dataset Numeric(NumericType) using localfs((`path`=`asterix_nc1://data/nontagged/numericData.json`),(`format`=`adm`));
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.2.update.sqlpp
new file mode 100644
index 0000000..bd244d0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.2.update.sqlpp
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.3.query.sqlpp
new file mode 100644
index 0000000..8a84b02
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.3.query.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+
+{'sql-count':test.array_countn((
+    select element x.doubleField
+    from  Numeric as x
+))};
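Here COUNTN runs over an optional field of an external dataset, so records whose doubleField is null simply do not contribute. The contrast with ARRAY_COUNT, which skips nulls but keeps non-numeric items, can be shown on in-line data (a hedged sketch; the values are illustrative and not from numericData.json):

    -- count keeps the string, countn does not; both skip nulls
    select value {'cnt': array_count([1.5, "x", 2.5, null]), 'cntn': array_countn([1.5, "x", 2.5, null])};
    -- expected: {"cnt": 3, "cntn": 2}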
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.1.ddl.sqlpp
new file mode 100644
index 0000000..6a5a783
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.1.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of array_countn without nulls.
+ * Success        : Yes
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.2.update.sqlpp
new file mode 100644
index 0000000..f0dcccb
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of array_countn without nulls.
+ * Success        : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.3.query.sqlpp
new file mode 100644
index 0000000..dce1b5d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.3.query.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of array_countn without nulls.
+ * Success        : Yes
+ */
+
+use test;
+
+
+with  i8 as test.array_countn([test.tinyint('1'),test.tinyint('2'),test.tinyint('3')]),
+      i16 as test.array_countn([test.smallint('1'),test.smallint('2'),test.smallint('3')]),
+      i32 as test.array_countn([test.integer('1'),test.integer('2'),test.integer('3')]),
+      i64 as test.array_countn([test.bigint('1'),test.bigint('2'),test.bigint('3')]),
+      f as test.array_countn([test.float('1'),test.float('2'),test.float('3')]),
+      d as test.array_countn([test.double('1'),test.double('2'),test.double('3')]),
+      s as test.array_countn(['a','b','c'])
+select element i
+from  [i8,i16,i32,i64,f,d,s] as i
+;
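The WITH clause runs the scalar aggregate once per numeric type, using the type constructors (e.g. tinyint('1')) to build typed values from string literals; the final binding shows that an all-string list counts to zero. Spot check (inferred expectation):

    select value array_countn(['a', 'b', 'c']);  -- expected: 0 (no numeric items)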
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.1.ddl.sqlpp
new file mode 100644
index 0000000..b924afc
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.1.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of array_countn with an empty list.
+ * Success        : Yes
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.2.update.sqlpp
new file mode 100644
index 0000000..ee0152f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of array_countn with an empty list.
+ * Success        : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.3.query.sqlpp
new file mode 100644
index 0000000..901dda1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.3.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of array_countn with an empty list.
+ * Success        : Yes
+ */
+
+use test;
+
+
+select element test.array_countn([]);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.1.ddl.sqlpp
new file mode 100644
index 0000000..7a939bd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.1.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of array_countn with nulls.
+ * Success        : Yes
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.2.update.sqlpp
new file mode 100644
index 0000000..63eec3e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of array_countn with nulls.
+ * Success        : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.3.query.sqlpp
new file mode 100644
index 0000000..0f2c33a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.3.query.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of array_countn with nulls.
+ * Success        : Yes
+ */
+
+use test;
+
+
+with  i8 as test.array_countn([test.tinyint('1'),test.tinyint('2'),test.tinyint('3'),null]),
+      i16 as test.array_countn([test.smallint('1'),test.smallint('2'),test.smallint('3'),null]),
+      i32 as test.array_countn([test.integer('1'),test.integer('2'),test.integer('3'),null]),
+      i64 as test.array_countn([test.bigint('1'),test.bigint('2'),test.bigint('3'),null]),
+      f as test.array_countn([test.float('1'),test.float('2'),test.float('3'),null]),
+      d as test.array_countn([test.double('1'),test.double('2'),test.double('3'),null]),
+      s as test.array_countn(['a','b','c',null]),
+      na as test.array_countn([null]),
+      n as test.array_countn(null),
+      m as test.array_countn(missing)
+select element i
+from  [i8,i16,i32,i64,f,d,s,na,n,m] as i
+;
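The last three bindings probe degenerate inputs: a list holding only null, a null argument, and a missing argument. The in-list case is uncontroversial, since nulls inside a valid list are skipped; what a null or missing argument itself maps to is recorded in the companion result file, so only the safe case is sketched here:

    select value array_countn([null, 1, 'x']);  -- expected: 1 (only the number 1 counts)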
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.1.ddl.sqlpp
new file mode 100644
index 0000000..ad97753
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.1.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.2.update.sqlpp
new file mode 100644
index 0000000..bd244d0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.2.update.sqlpp
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.3.query.sqlpp
new file mode 100644
index 0000000..31e7b07
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.3.query.sqlpp
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+select element test.strict_countn((
+    select element x
+    from  [1,2,3,"four"] as x
+));
+
+select element test.countn(x) from [
+{"x":1},{"x":2},{"x":3},{"x":"four"},{"x":null},{"x":[1,2,3]},{"x":"1"},{"x":{"k":1}}
+] as list;
+
+select n, test.countn(x) from [
+  {"x":1,"n":"a"},
+  {"x":2,"n":"a"},
+  {"x":3,"n":"b"},
+  {"x":"four","n":"a"},
+  {"x":null,"n":"b"},
+  {"x":[1,2,3],"n":"a"},
+  {"x":"1","n":"b"},
+  {"x":{"k":1},"n":"b"},
+  {"n":"b"}
+] as list group by n;
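In the grouped form, a bare countn(x) call is rewritten into the per-group aggregate, so each group counts only its numeric x values: nulls, strings, arrays, records and missing fields are all skipped. A reduced version of the same shape (expected output inferred from the data, not from a result file):

    -- group 'a' holds 1, 2 and 'four' (2 numerics); group 'b' holds 3 and null (1 numeric)
    select n, countn(x) as cnt
    from [{'x': 1, 'n': 'a'}, {'x': 2, 'n': 'a'}, {'x': 3, 'n': 'b'},
          {'x': 'four', 'n': 'a'}, {'x': null, 'n': 'b'}] as t
    group by n;  -- expected: {"n": "a", "cnt": 2}, {"n": "b", "cnt": 1}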
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_02/countn_02.1.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_02/countn_02.1.query.sqlpp
new file mode 100644
index 0000000..4a10509
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_02/countn_02.1.query.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+select value strict_countn(x) from [
+{"x":[1,2,3]},
+{"x":[1,2,null]},
+{"x":[1,"two"]},
+{"x":[1,"two", null]},
+{"x":[null]},
+{"x":[]},
+{"a":[1,2,3]}
+] as list
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_distinct/countn_distinct.1.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_distinct/countn_distinct.1.query.sqlpp
new file mode 100644
index 0000000..699d182
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_distinct/countn_distinct.1.query.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ {
+   't1': strict_countn(distinct [10,20,30,10,20,30,10]),
+   't2': strict_countn(distinct [null,missing,null,missing]),
+   't3': strict_countn(distinct [40,null,missing,50,40,null,missing,50,40]),
+   't4': strict_countn(distinct [10,20,30,10,20,"thirty",20]),
+   't5': strict_countn(distinct [])
+ }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.1.ddl.sqlpp
new file mode 100644
index 0000000..55cfa23
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.1.ddl.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that countn aggregation correctly returns 0 for an empty stream,
+ *                  without an aggregate combiner.
+ * Success        : Yes
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.2.update.sqlpp
new file mode 100644
index 0000000..dcb559a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.2.update.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that countn aggregation correctly returns 0 for an empty stream,
+ *                  without an aggregate combiner.
+ * Success        : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.3.query.sqlpp
new file mode 100644
index 0000000..9228733
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.3.query.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that countn aggregation correctly returns 0 for an empty stream,
+ *                  without an aggregate combiner.
+ * Success        : Yes
+ */
+
+select element strict_countn((
+    select element x
+    from  [1,2,3] as x
+    where (x > 10)
+));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.1.ddl.sqlpp
new file mode 100644
index 0000000..215fd1b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.1.ddl.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that countn aggregation correctly returns 0 for an empty stream,
+ *                  with an aggregate combiner.
+ * Success        : Yes
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+ closed {
+  id : integer,
+  val : double
+};
+
+create  dataset Test(TestType) primary key id;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.2.update.sqlpp
new file mode 100644
index 0000000..d8ebd7f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.2.update.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that countn aggregation correctly returns 0 for an empty stream,
+ *                  with an aggregate combiner.
+ * Success        : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.3.query.sqlpp
new file mode 100644
index 0000000..2844438
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.3.query.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that countn aggregation correctly returns 0 for an empty stream,
+ *                  with an aggregate combiner.
+ * Success        : Yes
+ */
+
+use test;
+
+
+select element test.strict_countn((
+    select element x.val
+    from  Test as x
+));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.1.ddl.sqlpp
new file mode 100644
index 0000000..3f2c5bf
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.1.ddl.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.NumericType as
+{
+  id : integer,
+  int8Field : tinyint?,
+  int16Field : smallint?,
+  int32Field : integer?,
+  int64Field : bigint?,
+  floatField : float?,
+  doubleField : double?
+};
+
+create external  dataset Numeric(NumericType) using localfs((`path`=`asterix_nc1://data/nontagged/numericData.json`),(`format`=`adm`));
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.2.update.sqlpp
new file mode 100644
index 0000000..bd244d0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.2.update.sqlpp
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.3.query.sqlpp
new file mode 100644
index 0000000..d5a60c4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.3.query.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+
+{'count':test.strict_countn((
+    select element x.doubleField
+    from  Numeric as x
+))};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.1.ddl.sqlpp
new file mode 100644
index 0000000..1172330
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.1.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of countn without nulls.
+ * Success        : Yes
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.2.update.sqlpp
new file mode 100644
index 0000000..411b984
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of countn without nulls.
+ * Success        : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.3.query.sqlpp
new file mode 100644
index 0000000..dd1b016
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.3.query.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of countn without nulls.
+ * Success        : Yes
+ */
+
+use test;
+
+
+with  i8 as test.strict_countn([test.tinyint('1'),test.tinyint('2'),test.tinyint('3')]),
+      i16 as test.strict_countn([test.smallint('1'),test.smallint('2'),test.smallint('3')]),
+      i32 as test.strict_countn([test.integer('1'),test.integer('2'),test.integer('3')]),
+      i64 as test.strict_countn([test.bigint('1'),test.bigint('2'),test.bigint('3')]),
+      f as test.strict_countn([test.float('1'),test.float('2'),test.float('3')]),
+      d as test.strict_countn([test.double('1'),test.double('2'),test.double('3')]),
+      s as test.strict_countn(['a','b','c'])
+select element i
+from  [i8,i16,i32,i64,f,d,s] as i
+;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.1.ddl.sqlpp
new file mode 100644
index 0000000..bbcfff8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.1.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of countn with an empty list.
+ * Success        : Yes
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.2.update.sqlpp
new file mode 100644
index 0000000..e6cd404
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of countn with an empty list.
+ * Success        : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.3.query.sqlpp
new file mode 100644
index 0000000..201aef3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.3.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of countn with an empty list.
+ * Success        : Yes
+ */
+
+use test;
+
+
+select element test.strict_countn([]);
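Note: countn over an empty list has nothing to count, so this query should
presumably return 0, as opposed to a null or missing input, which the
scalar_countn_null case below exercises. Equivalent standalone sketch:

    select value strict_countn([]);  -- expected: 0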
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.1.ddl.sqlpp
new file mode 100644
index 0000000..cb7c70c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.1.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of countn with nulls.
+ * Success        : Yes
+ */
+
+drop  dataverse test if exists;
+create  dataverse test;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.2.update.sqlpp
new file mode 100644
index 0000000..b7da5af
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of countn with nulls.
+ * Success        : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.3.query.sqlpp
new file mode 100644
index 0000000..393d16e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.3.query.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests the scalar version of countn with nulls.
+ * Success        : Yes
+ */
+
+use test;
+
+
+with  i8 as test.strict_countn([test.tinyint('1'),test.tinyint('2'),test.tinyint('3'),null]),
+      i16 as test.strict_countn([test.smallint('1'),test.smallint('2'),test.smallint('3'),null]),
+      i32 as test.strict_countn([test.integer('1'),test.integer('2'),test.integer('3'),null]),
+      i64 as test.strict_countn([test.bigint('1'),test.bigint('2'),test.bigint('3'),null]),
+      f as test.strict_countn([test.float('1'),test.float('2'),test.float('3'),null]),
+      d as test.strict_countn([test.double('1'),test.double('2'),test.double('3'),null]),
+      s as test.strict_countn(['a','b','c',null]),
+      na as test.strict_countn([null]),
+      n as test.strict_countn(null),
+      m as test.strict_countn(missing)
+select element i
+from  [i8,i16,i32,i64,f,d,s,na,n,m] as i
+;
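Note: the intent here is that nulls inside a list are skipped (each of the
first seven bindings should still count 3, and na should be 0), while a null
or missing argument presumably propagates as null (the n and m bindings). A
minimal sketch of the two boundary cases, with the expected values inferred
rather than taken from a result file:

    select value [strict_countn([null]), strict_countn(null)];
    -- expected: [0, null]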
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.1.ddl.sqlpp
new file mode 100755
index 0000000..8103562
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.1.ddl.sqlpp
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+CREATE TYPE openType AS {id: int};
+CREATE DATASET ds(openType) primary key id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.2.update.sqlpp
new file mode 100755
index 0000000..a9171ce
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.2.update.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+UPSERT INTO ds {"id": 1, "arr": [0, 1, 2, 3, 4, 5]};
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.3.query.sqlpp
new file mode 100755
index 0000000..91a479a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.3.query.sqlpp
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+with arr as [0, 1, 2, 3, 4, 5] select value
+{
+  "t1": (arr[2]),
+  "t2": (arr[2:4]),
+  "t3": (arr[2:10]),
+  "t4": (arr[-7:4]),
+  "t5": (arr[-7:10]),
+  "t6": (arr[-3:4]),
+  "t7": (arr[2:-1]),
+  "t8": (arr[2:]),
+  "t9": (arr[10:]),
+  "t10": (arr[-7:]),
+  "t11": (arr[:]),
+  "t12": (arr[0]),
+  "t13": (arr[:4]),
+  "t14": (arr[:10]),
+  "t15": (arr[:-7]),
+  "t16": ([][0:]),
+  "t17": ([][:0]),
+  "t18": (select value ds.arr[1:3] from ds),
+  "t19": (select value ds.arr[1:] from ds),
+  "t20": (select value ds.arr[:3] from ds),
+  "t21": (select value ds.arr[] from ds),
+  "t22": (select value ds.arr[:] from ds),
+  "t20": (select value ds.arr[:10] from ds)
+};
\ No newline at end of file
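Note: bracket-notation slicing here appears to follow the usual
start-inclusive, end-exclusive convention, with negative positions counted
back from the end of the array. A few worked cases for arr = [0, 1, 2, 3, 4, 5],
inferred from that convention rather than from a result file:

    arr[2:4]   -- expected: [2, 3]
    arr[2:]    -- expected: [2, 3, 4, 5]
    arr[2:-1]  -- expected: [2, 3, 4]  (end -1 resolves to position 5)
    arr[-3:4]  -- expected: [3]        (start -3 resolves to position 3)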
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.4.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.4.ddl.sqlpp
new file mode 100755
index 0000000..43604c6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.4.ddl.sqlpp
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATASET ds;
+DROP TYPE openType;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-does-not-exist/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-does-not-exist/test.040.query.sqlpp
new file mode 100644
index 0000000..dc66c29
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-does-not-exist/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "doesNotExistCollection") AS v;
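Note: these cases exercise the storage_size datasource function, which takes
the qualified path of a collection as string arguments, plus an optional
fourth argument naming an index. The two call forms used across this change:

    -- size of a whole collection
    SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection") AS v;
    -- size of a single index on that collection
    SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection", "testCollection_idx") AS v;

Pointing it at a collection that does not exist, as above, is presumably
expected to fail rather than return an empty result.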
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-does-not-exist/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-does-not-exist/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-does-not-exist/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-exists/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-exists/test.010.ddl.sqlpp
new file mode 100644
index 0000000..a2ccf84
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-exists/test.010.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
+CREATE DATABASE testDatabase;
+CREATE DATAVERSE testDatabase.testScope;
+USE testDatabase.testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-exists/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-exists/test.020.update.sqlpp
new file mode 100644
index 0000000..4383138
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-exists/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testDatabase.testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
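Note: range(1, 512) in SQL++ generates the integers from 1 through 512
inclusive, so this statement should load 512 small documents into the
collection, giving the storage-size query something non-trivial to measure.
A quick sanity check, illustrative only:

    USE testDatabase.testScope;
    SELECT VALUE COUNT(v) FROM testCollection AS v;  -- expected: 512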
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-exists/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-exists/test.040.query.sqlpp
new file mode 100644
index 0000000..d4442c7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-exists/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-exists/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-exists/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/collection-exists/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/external-collection/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/external-collection/test.010.ddl.sqlpp
new file mode 100644
index 0000000..5ae3310
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/external-collection/test.010.ddl.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
+CREATE DATABASE testDatabase;
+CREATE DATAVERSE testDatabase.testScope;
+USE testDatabase.testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
+
+CREATE TYPE test AS open {};
+CREATE EXTERNAL DATASET testExternalCollection(test) USING S3 (
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("region"="us-west-2"),
+("serviceEndpoint"="http://127.0.0.1:8001"),
+("container"="playground"),
+("definition"="json-data/reviews/single-line/json"),
+("format"="json")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/external-collection/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/external-collection/test.020.update.sqlpp
new file mode 100644
index 0000000..4383138
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/external-collection/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testDatabase.testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/external-collection/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/external-collection/test.040.query.sqlpp
new file mode 100644
index 0000000..9915cfd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/external-collection/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testExternalCollection") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/external-collection/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/external-collection/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/external-collection/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.010.ddl.sqlpp
new file mode 100644
index 0000000..a2ccf84
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.010.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
+CREATE DATABASE testDatabase;
+CREATE DATAVERSE testDatabase.testScope;
+USE testDatabase.testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.020.update.sqlpp
new file mode 100644
index 0000000..4383138
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testDatabase.testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.021.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.021.update.sqlpp
new file mode 100644
index 0000000..18ca3d0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.021.update.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testDatabase.testScope;
+
+CREATE INDEX testCollection_idx ON testCollection(some_value: int);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.040.query.sqlpp
new file mode 100644
index 0000000..905a20a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection", "testCollection_idx_fake") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-does-not-exist/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.010.ddl.sqlpp
new file mode 100644
index 0000000..a2ccf84
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.010.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
+CREATE DATABASE testDatabase;
+CREATE DATAVERSE testDatabase.testScope;
+USE testDatabase.testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.020.update.sqlpp
new file mode 100644
index 0000000..4383138
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testDatabase.testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.021.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.021.update.sqlpp
new file mode 100644
index 0000000..18ca3d0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.021.update.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testDatabase.testScope;
+
+CREATE INDEX testCollection_idx ON testCollection(some_value: int);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.040.query.sqlpp
new file mode 100644
index 0000000..cb60cb9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection", "testCollection_idx") AS v;
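Note: this is the happy-path counterpart of the index-does-not-exist case
above. With testCollection_idx created in step 021, the four-argument form
should resolve the index and report its size, whereas naming a nonexistent
index is presumably rejected. The two cases differ only in the final
argument:

    SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection", "testCollection_idx") AS v;       -- index exists
    SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection", "testCollection_idx_fake") AS v;  -- error case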
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/index-exists/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.010.ddl.sqlpp
new file mode 100644
index 0000000..a2ccf84
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.010.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
+CREATE DATABASE testDatabase;
+CREATE DATAVERSE testDatabase.testScope;
+USE testDatabase.testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.020.update.sqlpp
new file mode 100644
index 0000000..4383138
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testDatabase.testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.040.query.sqlpp
new file mode 100644
index 0000000..d376541
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.041.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.041.query.sqlpp
new file mode 100644
index 0000000..b395ed5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.041.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection", "testIndex", "tooManyArgs") AS v;
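Note: together with test.040, this pins down the accepted arity of
storage_size: two arguments (no collection) and five arguments (one past the
optional index) should both presumably be rejected with an
invalid-argument-count error, leaving three and four as the only valid call
shapes:

    storage_size("db", "scope")                          -- too few
    storage_size("db", "scope", "coll")                  -- valid
    storage_size("db", "scope", "coll", "idx")           -- valid
    storage_size("db", "scope", "coll", "idx", "extra")  -- too many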
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/invalid-arguments-count/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-constant-argument/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-constant-argument/test.010.ddl.sqlpp
new file mode 100644
index 0000000..a2ccf84
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-constant-argument/test.010.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
+CREATE DATABASE testDatabase;
+CREATE DATAVERSE testDatabase.testScope;
+USE testDatabase.testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-constant-argument/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-constant-argument/test.020.update.sqlpp
new file mode 100644
index 0000000..4383138
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-constant-argument/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testDatabase.testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-constant-argument/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-constant-argument/test.040.query.sqlpp
new file mode 100644
index 0000000..aea887f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-constant-argument/test.040.query.sqlpp
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
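+-- Negative test: storage_size() is invoked with field references rather than constant arguments.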
+USE testDatabase.testScope;
+SELECT storage_size(v.id, v.id, v.id) FROM testCollection AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-constant-argument/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-constant-argument/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-constant-argument/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-string-argument/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-string-argument/test.010.ddl.sqlpp
new file mode 100644
index 0000000..a2ccf84
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-string-argument/test.010.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
+CREATE DATABASE testDatabase;
+CREATE DATAVERSE testDatabase.testScope;
+USE testDatabase.testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-string-argument/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-string-argument/test.020.update.sqlpp
new file mode 100644
index 0000000..4383138
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-string-argument/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testDatabase.testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-string-argument/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-string-argument/test.040.query.sqlpp
new file mode 100644
index 0000000..ddb1996
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-string-argument/test.040.query.sqlpp
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
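+-- Negative test: storage_size() is invoked with numeric literals rather than string arguments.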
+SELECT VALUE v FROM storage_size(1, 2, 3) AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-string-argument/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-string-argument/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/non-string-argument/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/sanity/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/sanity/test.010.ddl.sqlpp
new file mode 100644
index 0000000..84d75e4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/sanity/test.010.ddl.sqlpp
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
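+-- Create a collection with a secondary index, plus three collections that will hold the measured sizes.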
+DROP DATABASE testDatabase IF EXISTS;
+CREATE DATABASE testDatabase;
+CREATE DATAVERSE testDatabase.testScope;
+USE testDatabase.testScope;
+
+CREATE COLLECTION testCollection PRIMARY KEY (id: int);
+
+CREATE INDEX testCollection_idx ON testCollection(someField1: int);
+
+CREATE COLLECTION collectionAndIndexes PRIMARY KEY (id: int);
+CREATE COLLECTION collectionOnly PRIMARY KEY (id: int);
+CREATE COLLECTION indexOnly PRIMARY KEY (id: int);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/sanity/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/sanity/test.020.update.sqlpp
new file mode 100644
index 0000000..f08857d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/sanity/test.020.update.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
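+-- Populate the collection, then capture the reported storage sizes for comparison in the next step.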
+USE testDatabase.testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {
+    "id": intVal,
+    "someField1": intVal,
+    "someField2": uuid(),
+    "someField3": {"firstName": uuid(), "lastName": uuid(), "location": {"lon": intVal * 2 + intVal, "lat": intVal * 7 + intVal}}
+    }
+    FROM range(1, 100000) intVal
+);
+
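+-- Capture: the dataset with all of its indexes (three arguments), the dataset alone, and the secondary index alone.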
+INSERT INTO collectionAndIndexes({"id": 1, "valueField": (SELECT VALUE size FROM storage_size("testDatabase", "testScope", "testCollection") AS x)[0]});
+INSERT INTO collectionOnly({"id": 1, "valueField": (SELECT VALUE size FROM storage_size("testDatabase", "testScope", "testCollection", "testCollection") AS x)[0]});
+INSERT INTO indexOnly({"id": 1, "valueField": (SELECT VALUE size FROM storage_size("testDatabase", "testScope", "testCollection", "testCollection_idx") AS x)[0]});
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/sanity/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/sanity/test.040.query.sqlpp
new file mode 100644
index 0000000..f37a396
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/sanity/test.040.query.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testDatabase.testScope;
+
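+-- Expect the size that includes indexes to exceed the collection-only size, which in turn exceeds the index-only size.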
+SELECT a.valueField > b.valueField AS collectionAndIndexesLargerThanCollectionOnly, b.valueField > c.valueField AS collectionOnlyLargerThanIndexOnly
+FROM collectionAndIndexes a,
+     collectionOnly b,
+     indexOnly c
+WHERE a.id = b.id
+  AND a.id = c.id
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/sanity/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/sanity/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/sanity/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/view-not-collection/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/view-not-collection/test.010.ddl.sqlpp
new file mode 100644
index 0000000..3d9ea2f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/view-not-collection/test.010.ddl.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
+CREATE DATABASE testDatabase;
+CREATE DATAVERSE testDatabase.testScope;
+USE testDatabase.testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
+
+CREATE VIEW testCollection_vw(id int)
+DEFAULT NULL datetime "YYYY-MM-DD hh:mm:ss" AS
+SELECT t.id
+FROM testCollection t;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/view-not-collection/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/view-not-collection/test.020.update.sqlpp
new file mode 100644
index 0000000..4383138
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/view-not-collection/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testDatabase.testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/view-not-collection/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/view-not-collection/test.040.query.sqlpp
new file mode 100644
index 0000000..cd88a1e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/view-not-collection/test.040.query.sqlpp
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
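+-- Negative test: storage_size() targets a view, which is not a collection and has no backing storage.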
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection_vw") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/view-not-collection/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/view-not-collection/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cloud_storage/storage-size/datasource-function/view-not-collection/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.01.ddl.sqlpp
new file mode 100644
index 0000000..56e79c8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.01.ddl.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+
+CREATE TYPE ColumnType1 AS {
+  id: integer,
+  name : string
+};
+
+CREATE COLLECTION TestCollection(ColumnType1) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.02.update.sqlpp
new file mode 100644
index 0000000..d2a376c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.02.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+insert into TestCollection({"id":18, "name": "Virat" , "dateType":date("1988-11-05"), "timeType": time("03:10:00.493Z") , "boolType" : false , "doubleType" : 0.75, "datetimeType" : datetime("1900-02-01T00:00:00") });
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.03.update.sqlpp
new file mode 100644
index 0000000..a95146f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.03.update.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
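+-- Write the collection to HDFS as Parquet with an explicit schema covering date, time, datetime, boolean, and double values.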
+COPY (
+   SELECT c.* FROM TestCollection c
+) toWriter
+TO hdfs
+PATH ("copy-to-result", "parquet-cover-data-types")
+TYPE ( { name : string, id : int, dateType : date, timeType : time, boolType : boolean, doubleType : double, datetimeType : datetime } )
+WITH {
+    "hdfs":"hdfs://127.0.0.1:31888/",
+    "format":"parquet"
+};
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.04.ddl.sqlpp
new file mode 100644
index 0000000..f23893b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.04.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+CREATE TYPE ColumnType2 AS {
+};
+
+
+
+CREATE EXTERNAL DATASET TestDataset(ColumnType2) USING hdfs
+(
+  ("hdfs"="hdfs://127.0.0.1:31888/"),
+  ("path"="copy-to-result/parquet-cover-data-types/"),
+  ("include"="*.parquet"),
+  ("input-format"="parquet-input-format"),
+  ("requireVersionChangeDetection"="false"),
+  ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.05.query.sqlpp
new file mode 100644
index 0000000..b03fc5e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM TestDataset c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.01.ddl.sqlpp
new file mode 100644
index 0000000..6be9489
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.01.ddl.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+
+CREATE TYPE ColumnType1 AS {
+    id: integer
+};
+
+CREATE COLLECTION TestCollection(ColumnType1) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.02.update.sqlpp
new file mode 100644
index 0000000..d569ee5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.02.update.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+INSERT INTO TestCollection {"id":2,"name":{"first":["power","star"]}};
+INSERT INTO TestCollection {"id":5,"name":{"first":[]}};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.03.update.sqlpp
new file mode 100644
index 0000000..5a66928
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.03.update.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
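+-- Copy records whose nested array may be empty, verifying that empty arrays survive the Parquet round trip.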
+COPY (
+   SELECT c.* FROM TestCollection c
+) toWriter
+TO hdfs
+PATH ("copy-to-result", "parquet-empty-array")
+TYPE ( { id : int,  name : { first :  [ string ] } } )
+WITH {
+    "hdfs":"hdfs://127.0.0.1:31888",
+    "format":"parquet"
+};
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.04.ddl.sqlpp
new file mode 100644
index 0000000..a7ee67f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.04.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+CREATE TYPE ColumnType2 AS {
+};
+
+
+
+CREATE EXTERNAL DATASET TestDataset(ColumnType2) USING hdfs
+(
+  ("hdfs"="hdfs://127.0.0.1:31888/"),
+  ("path"="copy-to-result/parquet-empty-array/"),
+  ("include"="*.parquet"),
+  ("input-format"="parquet-input-format"),
+  ("requireVersionChangeDetection"="false"),
+  ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.05.query.sqlpp
new file mode 100644
index 0000000..b03fc5e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-empty-array/parquet-empty-array.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM TestDataset c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-simple/parquet-simple.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-simple/parquet-simple.01.ddl.sqlpp
new file mode 100644
index 0000000..76970a5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-simple/parquet-simple.01.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+CREATE TYPE ColumnType2 AS {
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-simple/parquet-simple.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-simple/parquet-simple.02.update.sqlpp
new file mode 100644
index 0000000..1183752
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-simple/parquet-simple.02.update.sqlpp
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
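+-- Write a single constant record as Parquet, pinning the writer to Parquet format version 2.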
+COPY (
+   select "123" as id
+) toWriter
+TO hdfs
+PATH ("copy-to-result", "parquet-simple")
+TYPE ( {id:string} )
+WITH {
+    "hdfs":"hdfs://127.0.0.1:31888",
+    "format":"parquet",
+    "version" : "2"
+};
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-simple/parquet-simple.03.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-simple/parquet-simple.03.ddl.sqlpp
new file mode 100644
index 0000000..e31c9b0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-simple/parquet-simple.03.ddl.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+CREATE EXTERNAL DATASET DatasetCopy(ColumnType2) USING hdfs
+(
+  ("hdfs"="hdfs://127.0.0.1:31888/"),
+  ("path"="copy-to-result/parquet-simple"),
+  ("format" = "parquet"),
+  ("input-format"="parquet-input-format"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-simple/parquet-simple.04.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-simple/parquet-simple.04.query.sqlpp
new file mode 100644
index 0000000..5aeedb8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-simple/parquet-simple.04.query.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id
+FROM DatasetCopy c
+ORDER BY c.id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.01.ddl.sqlpp
new file mode 100644
index 0000000..f890e0d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.01.ddl.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+CREATE TYPE ColumnType1 AS {
+    id: string
+};
+
+CREATE DATASET DummyTweetDataset(ColumnType1)
+PRIMARY KEY id WITH {
+    "storage-format": {"format" : "column"}
+};
+
+
+CREATE TYPE ColumnType2 AS {
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.02.update.sqlpp
new file mode 100644
index 0000000..83a1140
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.02.update.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+LOAD DATASET DummyTweetDataset USING localfs
+(
+    ("path" = "asterix_nc1://data/hdfs/parquet/dummy_tweet.json"),
+    ("format" = "json")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.03.update.sqlpp
new file mode 100644
index 0000000..9bae74b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.03.update.sqlpp
@@ -0,0 +1,215 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
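+-- Copy the tweet dataset to Parquet, spelling out the full nested schema for the writer.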
+COPY (
+   SELECT c.* FROM DummyTweetDataset c
+) toWriter
+TO hdfs
+PATH ("copy-to-result", "parquet-tweet")
+TYPE ( {
+               coordinates: {
+                 coordinates: [
+                   double
+                 ],
+                 `type` : string
+               },
+               created_at: string,
+               entities: {
+                 urls: [
+                   {
+                     display_url: string,
+                     expanded_url: string,
+                     indices: [
+                       int
+                     ],
+                     url: string
+                   }
+                 ],
+                 user_mentions: [
+                   {
+                     id: int,
+                     id_str: string,
+                     indices: [
+                       int
+                     ],
+                     name: string,
+                     screen_name: string
+                   }
+                 ]
+               },
+               favorite_count: int,
+               favorited: boolean,
+               filter_level: string,
+               geo: {
+                 coordinates: [
+                   double
+                 ],
+                 `type`: string
+               },
+               id: string,
+               id_str: string,
+               in_reply_to_screen_name: string,
+               in_reply_to_status_id: int,
+               in_reply_to_status_id_str: string,
+               in_reply_to_user_id: int,
+               in_reply_to_user_id_str: string,
+               is_quote_status: boolean,
+               lang: string,
+               place: {
+                 bounding_box: {
+                   coordinates: [
+                     [
+                       [
+                         double
+                       ]
+                     ]
+                   ],
+                   `type`: string
+                 },
+                 country: string,
+                 country_code: string,
+                 full_name: string,
+                 id: string,
+                 name: string,
+                 place_type: string,
+                 url: string
+               },
+               possibly_sensitive: boolean,
+               quoted_status: {
+                 created_at: string,
+                 entities: {
+                   user_mentions: [
+                     {
+                       id: int,
+                       id_str: string,
+                       indices: [
+                         int
+                       ],
+                       name: string,
+                       screen_name: string
+                     }
+                   ]
+                 },
+                 favorite_count: int,
+                 favorited: boolean,
+                 filter_level: string,
+                 id: int,
+                 id_str: string,
+                 in_reply_to_screen_name: string,
+                 in_reply_to_status_id: int,
+                 in_reply_to_status_id_str: string,
+                 in_reply_to_user_id: int,
+                 in_reply_to_user_id_str: string,
+                 is_quote_status: boolean,
+                 lang: string,
+                 retweet_count: int,
+                 retweeted: boolean,
+                 source: string,
+                 text: string,
+                 truncated: boolean,
+                 user: {
+                   contributors_enabled: boolean,
+                   created_at: string,
+                   default_profile: boolean,
+                   default_profile_image: boolean,
+                   description: string,
+                   favourites_count: int,
+                   followers_count: int,
+                   friends_count: int,
+                   geo_enabled: boolean,
+                   id: int,
+                   id_str: string,
+                   is_translator: boolean,
+                   lang: string,
+                   listed_count: int,
+                   name: string,
+                   profile_background_color: string,
+                   profile_background_image_url: string,
+                   profile_background_image_url_https: string,
+                   profile_background_tile: boolean,
+                   profile_banner_url: string,
+                   profile_image_url: string,
+                   profile_image_url_https: string,
+                   profile_link_color: string,
+                   profile_sidebar_border_color: string,
+                   profile_sidebar_fill_color: string,
+                   profile_text_color: string,
+                   profile_use_background_image: boolean,
+                   protected: boolean,
+                   screen_name: string,
+                   statuses_count: int,
+                   verified: boolean
+                 }
+               },
+               quoted_status_id: int,
+               quoted_status_id_str: string,
+               retweet_count: int,
+               retweeted: boolean,
+               source: string,
+               text: string,
+               timestamp_ms: string,
+               truncated: boolean,
+               user: {
+                 contributors_enabled: boolean,
+                 created_at: string,
+                 default_profile: boolean,
+                 default_profile_image: boolean,
+                 description: string,
+                 favourites_count: int,
+                 followers_count: int,
+                 friends_count: int,
+                 geo_enabled: boolean,
+                 id: int,
+                 id_str: string,
+                 is_translator: boolean,
+                 lang: string,
+                 listed_count: int,
+                 location: string,
+                 name: string,
+                 profile_background_color: string,
+                 profile_background_image_url: string,
+                 profile_background_image_url_https: string,
+                 profile_background_tile: boolean,
+                 profile_banner_url: string,
+                 profile_image_url: string,
+                 profile_image_url_https: string,
+                 profile_link_color: string,
+                 profile_sidebar_border_color: string,
+                 profile_sidebar_fill_color: string,
+                 profile_text_color: string,
+                 profile_use_background_image: boolean,
+                 protected: boolean,
+                 screen_name: string,
+                 statuses_count: int,
+                 time_zone: string,
+                 url: string,
+                 utc_offset: int,
+                 verified: boolean
+               }
+             } )
+WITH {
+    "hdfs":"hdfs://127.0.0.1:31888",
+    "format":"parquet"
+};
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.04.ddl.sqlpp
new file mode 100644
index 0000000..62467d2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.04.ddl.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+
+
+CREATE EXTERNAL DATASET DummyTweetDatasetCopy(ColumnType2) USING hdfs
+(
+  ("hdfs"="hdfs://127.0.0.1:31888/"),
+  ("path"="copy-to-result/parquet-tweet/"),
+  ("include"="*.parquet"),
+  ("input-format"="parquet-input-format"),
+  ("requireVersionChangeDetection"="false"),
+  ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.05.query.sqlpp
new file mode 100644
index 0000000..13587f6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-tweet/parquet-tweet.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM DummyTweetDatasetCopy c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.01.ddl.sqlpp
new file mode 100644
index 0000000..dfc64ce
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.01.ddl.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+CREATE TYPE ColumnType1 AS {
+    id: int
+};
+
+CREATE DATASET NameCommentDataset(ColumnType1)
+PRIMARY KEY id WITH {
+    "storage-format": {"format" : "column"}
+};
+
+
+CREATE TYPE ColumnType2 AS {
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.02.update.sqlpp
new file mode 100644
index 0000000..8591369
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.02.update.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+LOAD DATASET NameCommentDataset USING localfs
+(
+    ("path" = "asterix_nc1://data/hdfs/parquet/id_name_comment.json"),
+    ("format" = "json")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.03.update.sqlpp
new file mode 100644
index 0000000..79b2dda
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.03.update.sqlpp
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+   SELECT c.* FROM NameCommentDataset c
+) toWriter
+TO hdfs
+PATH ("copy-to-result", "parquet-utf8")
+TYPE ( { comment:string, id:bigint, name:string } )
+WITH {
+    "hdfs":"hdfs://127.0.0.1:31888",
+    "format":"parquet"
+};
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.04.ddl.sqlpp
new file mode 100644
index 0000000..49c9b0f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.04.ddl.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+
+
+CREATE EXTERNAL DATASET NameCommentDatasetCopy(ColumnType2) USING hdfs
+(
+  ("hdfs"="hdfs://127.0.0.1:31888"),
+  ("path"="copy-to-result/parquet-utf8/"),
+  ("include"="*.parquet"),
+  ("input-format"="parquet-input-format"),
+  ("requireVersionChangeDetection"="false"),
+  ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.05.query.sqlpp
new file mode 100644
index 0000000..17cd027
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to-hdfs/parquet-utf8/parquet-utf8.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM NameCommentDatasetCopy c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.01.ddl.sqlpp
new file mode 100644
index 0000000..80a8c34
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.01.ddl.sqlpp
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+
+USE test;
+
+CREATE TYPE ColumnType AS {
+  id: bigint,
+  name: string,
+  amount: float,
+  accountNumber: double
+};
+
+CREATE COLLECTION TestCollection(ColumnType) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.02.update.sqlpp
new file mode 100644
index 0000000..d57a311
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.02.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+USE test;
+
+INSERT INTO TestCollection({"id":1, "name":"Macbook1", "amount":123.2, "accountNumber":345.34});
+INSERT INTO TestCollection({"id":2, "name":"Macbook2", "amount":456.7, "accountNumber":123.45});
+INSERT INTO TestCollection({"id":3, "name":"Macbook3", "amount":789.1, "accountNumber":678.90});
+INSERT INTO TestCollection({"id":4, "name":"Macbook4", "amount":234.5, "accountNumber":567.89});
+INSERT INTO TestCollection({"id":5, "name":"Macbook5", "amount":876.5, "accountNumber":345.67});
+INSERT INTO TestCollection({"id":6, "name":"Macbook6", "amount":345.6, "accountNumber":987.65});
+INSERT INTO TestCollection({"id":7, "name":"Macbook7", "amount":678.9, "accountNumber":234.56});
+INSERT INTO TestCollection({"id":8, "name":"Macbook8", "amount":987.2, "accountNumber":789.12});
+INSERT INTO TestCollection({"id":9, "name":"Macbook9", "amount":543.2, "accountNumber":321.45});
+INSERT INTO TestCollection({"id":10, "name":"Macbook10", "amount":123.9, "accountNumber":654.32});
+INSERT INTO TestCollection({"id":11, "name":"Macbook11", "amount":567.8, "accountNumber":456.78});
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.03.update.sqlpp
new file mode 100644
index 0000000..c04f2fe
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.03.update.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+USE test;
+
+COPY (
+   SELECT id, name, amount, accountNumber FROM TestCollection
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv", "delimiter")
+AS (id bigint, name string, amount float, accountNumber double)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "delimiter":"|"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.04.ddl.sqlpp
new file mode 100644
index 0000000..b19cb4b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.04.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+CREATE EXTERNAL DATASET DatasetCopy(ColumnType) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("header"="false"),
+("delimiter"="|"),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/csv/delimiter"),
+  ("format" = "csv"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.csv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.05.query.sqlpp
new file mode 100644
index 0000000..b407ec4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.05.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id, name, amount, accountNumber
+FROM DatasetCopy d ORDER BY d.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.06.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.06.ddl.sqlpp
new file mode 100644
index 0000000..05d7b83
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.06.ddl.sqlpp
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * A wrong delimiter is passed, so no rows are expected in the output
+ */
+
+USE test;
+
+CREATE EXTERNAL DATASET DatasetCopyWrong(ColumnType) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("header"="false"),
+("delimiter"=","),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/csv/delimiter"),
+  ("format" = "csv"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.csv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.07.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.07.query.sqlpp
new file mode 100644
index 0000000..7e6f1c5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/delimiter/delimiter.07.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id, name, amount, accountNumber
+FROM DatasetCopyWrong d ORDER BY d.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.01.ddl.sqlpp
new file mode 100644
index 0000000..80a8c34
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.01.ddl.sqlpp
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+
+USE test;
+
+CREATE TYPE ColumnType AS {
+  id: bigint,
+  name: string,
+  amount: float,
+  accountNumber: double
+};
+
+CREATE COLLECTION TestCollection(ColumnType) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.02.update.sqlpp
new file mode 100644
index 0000000..d57a311
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.02.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+USE test;
+
+INSERT INTO TestCollection({"id":1, "name":"Macbook1", "amount":123.2, "accountNumber":345.34});
+INSERT INTO TestCollection({"id":2, "name":"Macbook2", "amount":456.7, "accountNumber":123.45});
+INSERT INTO TestCollection({"id":3, "name":"Macbook3", "amount":789.1, "accountNumber":678.90});
+INSERT INTO TestCollection({"id":4, "name":"Macbook4", "amount":234.5, "accountNumber":567.89});
+INSERT INTO TestCollection({"id":5, "name":"Macbook5", "amount":876.5, "accountNumber":345.67});
+INSERT INTO TestCollection({"id":6, "name":"Macbook6", "amount":345.6, "accountNumber":987.65});
+INSERT INTO TestCollection({"id":7, "name":"Macbook7", "amount":678.9, "accountNumber":234.56});
+INSERT INTO TestCollection({"id":8, "name":"Macbook8", "amount":987.2, "accountNumber":789.12});
+INSERT INTO TestCollection({"id":9, "name":"Macbook9", "amount":543.2, "accountNumber":321.45});
+INSERT INTO TestCollection({"id":10, "name":"Macbook10", "amount":123.9, "accountNumber":654.32});
+INSERT INTO TestCollection({"id":11, "name":"Macbook11", "amount":567.8, "accountNumber":456.78});
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.03.update.sqlpp
new file mode 100644
index 0000000..cc0c717
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.03.update.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+USE test;
+
+COPY (
+   SELECT id, name, amount, accountNumber FROM TestCollection
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv", "header")
+AS (id bigint, name string, amount float, accountNumber double)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "delimiter":"|",
+    "header":"true"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.04.ddl.sqlpp
new file mode 100644
index 0000000..f65d0d4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.04.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+CREATE EXTERNAL DATASET DatasetCopy(ColumnType) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("header"="true"),
+("delimiter"="|"),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/csv/header"),
+  ("format" = "csv"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.csv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.05.query.sqlpp
new file mode 100644
index 0000000..b407ec4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.05.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id, name, amount, accountNumber
+FROM DatasetCopy d ORDER BY d.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.10.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.10.update.sqlpp
new file mode 100644
index 0000000..3b13ef0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.10.update.sqlpp
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Default Null Test
+ */
+
+USE test;
+
+COPY (
+   SELECT id, null AS name, amount, accountNumber FROM TestCollection
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv", "default", "null")
+AS (id bigint, name string, amount float, accountNumber double)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "delimiter":"|",
+    "header":"true"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.11.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.11.ddl.sqlpp
new file mode 100644
index 0000000..1d6cd22
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.11.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+CREATE EXTERNAL DATASET DatasetCopyDefaultNull(ColumnType) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("header"="true"),
+("delimiter"="|"),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/csv/default/null"),
+  ("format" = "csv"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.csv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.12.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.12.query.sqlpp
new file mode 100644
index 0000000..bff57e4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.12.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id, name, amount, accountNumber
+FROM DatasetCopyDefaultNull dn ORDER BY dn.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.20.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.20.update.sqlpp
new file mode 100644
index 0000000..46ee936
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.20.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+   SELECT id, null AS name, amount, accountNumber FROM TestCollection
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv", "custom", "null")
+AS (id bigint, name string, amount float, accountNumber double)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "delimiter":"|",
+    "header":"true",
+    "null":"IamNull"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.21.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.21.ddl.sqlpp
new file mode 100644
index 0000000..82f048d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.21.ddl.sqlpp
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Custom Null Test
+ */
+
+USE test;
+
+CREATE EXTERNAL DATASET DatasetCopyCustomNull(ColumnType) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("header"="true"),
+("delimiter"="|"),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/csv/custom/null"),
+  ("format" = "csv"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.csv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.22.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.22.query.sqlpp
new file mode 100644
index 0000000..72f9ff3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.22.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id, name, amount, accountNumber
+FROM DatasetCopyCustomNull dn ORDER BY dn.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.30.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.30.update.sqlpp
new file mode 100644
index 0000000..1fcf3cd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.30.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+   SELECT id, null AS name, amount, accountNumber FROM TestCollection
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv", "notUnknown")
+AS (id bigint, name string NOT UNKNOWN, amount float, accountNumber double)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "delimiter":"|",
+    "header":"true",
+    "null":"IamNull"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.31.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.31.ddl.sqlpp
new file mode 100644
index 0000000..eb00e4d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.31.ddl.sqlpp
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * NOT UNKNOWN Test
+ */
+
+USE test;
+
+CREATE EXTERNAL DATASET DatasetCopyNotUnknown(ColumnType) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("header"="true"),
+("delimiter"="|"),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/csv/notUnknown"),
+  ("format" = "csv"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.csv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.32.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.32.query.sqlpp
new file mode 100644
index 0000000..a6f1945
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/header/header.32.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id, name, amount, accountNumber
+FROM DatasetCopyNotUnknown dn ORDER BY dn.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.01.ddl.sqlpp
new file mode 100644
index 0000000..80a8c34
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.01.ddl.sqlpp
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+
+USE test;
+
+CREATE TYPE ColumnType AS {
+  id: bigint,
+  name: string,
+  amount: float,
+  accountNumber: double
+};
+
+CREATE COLLECTION TestCollection(ColumnType) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.02.update.sqlpp
new file mode 100644
index 0000000..d57a311
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.02.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+USE test;
+
+INSERT INTO TestCollection({"id":1, "name":"Macbook1", "amount":123.2, "accountNumber":345.34});
+INSERT INTO TestCollection({"id":2, "name":"Macbook2", "amount":456.7, "accountNumber":123.45});
+INSERT INTO TestCollection({"id":3, "name":"Macbook3", "amount":789.1, "accountNumber":678.90});
+INSERT INTO TestCollection({"id":4, "name":"Macbook4", "amount":234.5, "accountNumber":567.89});
+INSERT INTO TestCollection({"id":5, "name":"Macbook5", "amount":876.5, "accountNumber":345.67});
+INSERT INTO TestCollection({"id":6, "name":"Macbook6", "amount":345.6, "accountNumber":987.65});
+INSERT INTO TestCollection({"id":7, "name":"Macbook7", "amount":678.9, "accountNumber":234.56});
+INSERT INTO TestCollection({"id":8, "name":"Macbook8", "amount":987.2, "accountNumber":789.12});
+INSERT INTO TestCollection({"id":9, "name":"Macbook9", "amount":543.2, "accountNumber":321.45});
+INSERT INTO TestCollection({"id":10, "name":"Macbook10", "amount":123.9, "accountNumber":654.32});
+INSERT INTO TestCollection({"id":11, "name":"Macbook11", "amount":567.8, "accountNumber":456.78});
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.03.update.sqlpp
new file mode 100644
index 0000000..b101ae5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.03.update.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+USE test;
+
+COPY (
+   SELECT id, null AS name, amount, accountNumber FROM TestCollection
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv", "null")
+AS (id bigint, name string, amount float, accountNumber double)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "delimiter":"|",
+    "header":"true",
+    "null":"IamNull"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.04.ddl.sqlpp
new file mode 100644
index 0000000..f3fdc90
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.04.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+CREATE EXTERNAL DATASET DatasetCopyNull(ColumnType) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("header"="true"),
+("delimiter"="|"),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/csv/null"),
+  ("format" = "csv"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.csv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.05.query.sqlpp
new file mode 100644
index 0000000..28beb5d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/null/null.05.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id, name, amount, accountNumber
+FROM DatasetCopyNull d ORDER BY d.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.01.ddl.sqlpp
new file mode 100644
index 0000000..9ee53a6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.01.ddl.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/* Currently, quote and escape are not supported when creating an external dataset
+ */
+
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+
+USE test;
+
+CREATE TYPE ColumnType AS {
+  id: bigint,
+  name: string,
+  amount: float,
+  accountNumber: double
+};
+
+CREATE COLLECTION TestCollection(ColumnType) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.02.update.sqlpp
new file mode 100644
index 0000000..8dd62e7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.02.update.sqlpp
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+USE test;
+
+INSERT INTO TestCollection({"id":1, "name":"Macbook1", "amount":123.2, "accountNumber":345.34});
+INSERT INTO TestCollection({"id":2, "name":"Macbook2", "amount":456.7, "accountNumber":123.45});
+INSERT INTO TestCollection({"id":3, "name":"Macbook3", "amount":789.1, "accountNumber":678.90});
+INSERT INTO TestCollection({"id":4, "name":"Mac|,book4", "amount":234.5, "accountNumber":567.89});
+
+COPY (
+   SELECT id, name, amount, accountNumber FROM TestCollection
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv", "escape")
+AS (id bigint, name string, amount float, accountNumber double)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "header":"true",
+    "escape":"|"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.03.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.03.ddl.sqlpp
new file mode 100644
index 0000000..2ac9258
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.03.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
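+/* The reader declares the same "escape" and "header" settings that the
+ * COPY TO statement used, so that the written file can be parsed back
+ * into the original values.
+ */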
+CREATE EXTERNAL DATASET DatasetCopy(ColumnType) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("header"="true"),
+("escape"="|"),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/csv/escape"),
+  ("format" = "csv"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.csv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.04.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.04.query.sqlpp
new file mode 100644
index 0000000..b407ec4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.04.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id, name, amount, accountNumber
+FROM DatasetCopy d ORDER BY d.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.11.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.11.update.sqlpp
new file mode 100644
index 0000000..b468831
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.11.update.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+USE test;
+
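+/* "quote":"NONE" below disables quoting entirely, so special characters
+ * are presumably handled by the escape character ("|") alone.
+ */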
+COPY (
+   SELECT id, name, amount, accountNumber FROM TestCollection
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv", "escape", "1")
+AS (id bigint, name string, amount float, accountNumber double)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "header":"true",
+    "escape":"|",
+    "quote": "NONE"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.12.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.12.ddl.sqlpp
new file mode 100644
index 0000000..9224e3c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.12.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+CREATE EXTERNAL DATASET DatasetCopy1(ColumnType) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("header"="true"),
+("escape"="|"),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/csv/escape/1"),
+  ("format" = "csv"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.csv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.13.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.13.query.sqlpp
new file mode 100644
index 0000000..cc786e9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/quote-escape/quote-escape.13.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id, name, amount, accountNumber
+FROM DatasetCopy1 d ORDER BY d.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.01.ddl.sqlpp
new file mode 100644
index 0000000..1e43374
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.01.ddl.sqlpp
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+
+USE test;
+
+CREATE TYPE ColumnType AS {
+  id: bigint,
+  name: string?,
+  amount: float,
+  accountNumber: double
+};
+
+CREATE COLLECTION TestCollection(ColumnType) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.02.update.sqlpp
new file mode 100644
index 0000000..80b535c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.02.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+USE test;
+
+INSERT INTO TestCollection({"id":1, "name":"", "amount":123.2, "accountNumber":345.34});
+INSERT INTO TestCollection({"id":2, "name":"Macbook2", "amount":456.7, "accountNumber":123.45});
+INSERT INTO TestCollection({"id":3, "name":"Macbook3", "amount":789.1, "accountNumber":678.90});
+INSERT INTO TestCollection({"id":4, "name":"Macbook4", "amount":234.5, "accountNumber":567.89});
+INSERT INTO TestCollection({"id":5, "name":"Macbook5", "amount":876.5, "accountNumber":345.67});
+INSERT INTO TestCollection({"id":6, "name":"Macbook6", "amount":345.6, "accountNumber":987.65});
+INSERT INTO TestCollection({"id":7, "name":"Macbook7", "amount":678.9, "accountNumber":234.56});
+INSERT INTO TestCollection({"id":8, "name":"Macbook8", "amount":987.2, "accountNumber":789.12});
+INSERT INTO TestCollection({"id":9, "name":"Macbook9", "amount":543.2, "accountNumber":321.45});
+INSERT INTO TestCollection({"id":10, "name":"Macbook10", "amount":123.9, "accountNumber":654.32});
+INSERT INTO TestCollection({"id":11, "name":"Macbook11", "amount":567.8, "accountNumber":456.78});
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.03.update.sqlpp
new file mode 100644
index 0000000..cddab9e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.03.update.sqlpp
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * This test exercises all of the CSV writer options ("knobs"). The schema is given inline, JSON style.
+ */
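+
+/* Options exercised below (glosses based on the option names): "delimiter"
+ * is the field separator, "null" the literal emitted for null values,
+ * "quote" the quoting character, "force-quote" whether every field is
+ * quoted, "escape" the escape character, and "empty_field_as_null" whether
+ * empty strings are treated as null.
+ */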
+
+USE test;
+
+COPY (
+   SELECT id, null name, amount, accountNumber FROM TestCollection
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv", "simple-csv", "1")
+TYPE ( {id: bigint, name: string?, amount: float, accountNumber: double} )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "delimiter":"|",
+    "header":"true",
+    "null":"IamNull",
+    "quote":"'",
+    "force-quote":"false",
+    "escape":"\\",
+    "empty_field_as_null":"true"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.04.ddl.sqlpp
new file mode 100644
index 0000000..c38e775d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.04.ddl.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+CREATE EXTERNAL DATASET DatasetCopy1(ColumnType) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("header"="true"),
+("delimiter"="|"),
+("quote"="'"),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/csv/simple-csv/1"),
+  ("format" = "csv"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.csv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.05.query.sqlpp
new file mode 100644
index 0000000..cc786e9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.05.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id, name, amount, accountNumber
+FROM DatasetCopy1 d ORDER BY d.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.11.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.11.update.sqlpp
new file mode 100644
index 0000000..5a8bd3a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.11.update.sqlpp
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * This test exercises all of the CSV writer options ("knobs"). The schema is given inline, JSON style.
+ * It also checks the case where missing values are written as null.
+ */
+
+USE test;
+
+COPY (
+   SELECT id, name, amount, accountNumber FROM TestCollection
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv", "simple-csv", "2")
+TYPE ( {id: bigint, name: string?, amount: float, accountNumber: double} )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "delimiter":"|",
+    "header":"true",
+    "null":"IamNull",
+    "quote":"'",
+    "force-quote":"false",
+    "escape":"\\",
+    "empty_field_as_null":"true"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.12.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.12.ddl.sqlpp
new file mode 100644
index 0000000..07577d0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.12.ddl.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+CREATE EXTERNAL DATASET DatasetCopy2(ColumnType) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("header"="true"),
+("delimiter"="|"),
+("quote"="'"),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/csv/simple-csv/2"),
+  ("format" = "csv"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.csv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.13.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.13.query.sqlpp
new file mode 100644
index 0000000..547b512
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/simple-csv/simple-csv.13.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id, name, amount, accountNumber
+FROM DatasetCopy2 d ORDER BY d.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/type-mismatch/type-mismatch.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/type-mismatch/type-mismatch.01.ddl.sqlpp
new file mode 100644
index 0000000..6228097
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/type-mismatch/type-mismatch.01.ddl.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+
+USE test;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/type-mismatch/type-mismatch.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/type-mismatch/type-mismatch.02.update.sqlpp
new file mode 100644
index 0000000..ac5b0b9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/type-mismatch/type-mismatch.02.update.sqlpp
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+USE test;
+
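+/* The query produces a string value for a column declared bigint in the
+ * AS clause; the read-back scripts that follow check how this mismatch
+ * is surfaced.
+ */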
+COPY (
+   SELECT "123" as id
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv", "type-mismatch")
+AS (id bigint)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/type-mismatch/type-mismatch.03.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/type-mismatch/type-mismatch.03.ddl.sqlpp
new file mode 100644
index 0000000..8daf039
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/type-mismatch/type-mismatch.03.ddl.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+CREATE EXTERNAL DATASET DatasetCopy(id string) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("header"="false"),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/csv/type-mismatch"),
+  ("format" = "csv"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.csv")
+);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/type-mismatch/type-mismatch.04.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/type-mismatch/type-mismatch.04.query.sqlpp
new file mode 100644
index 0000000..8e2b639
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/csv/type-mismatch/type-mismatch.04.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id
+FROM DatasetCopy c ORDER BY c.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.01.ddl.sqlpp
new file mode 100644
index 0000000..591c949
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.01.ddl.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+USE test;
+
+
+CREATE TYPE ColumnType1 AS {
+  id: integer
+};
+
+CREATE COLLECTION TestCollection(ColumnType1) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.02.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.02.ddl.sqlpp
new file mode 100644
index 0000000..ef6631b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.02.ddl.sqlpp
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
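+/* Negative case: the statement below gives neither a TYPE nor an AS
+ * schema, which the CSV writer presumably rejects, since CSV output
+ * requires a known, flat schema.
+ */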
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv-error-checks2")
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.03.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.03.ddl.sqlpp
new file mode 100644
index 0000000..87ebe40
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.03.ddl.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
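+/* Negative case: "wrongDataType" in the AS clause is not a valid type
+ * name, so the statement should fail at compile time.
+ */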
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv-error-checks3")
+AS (id wrongDataType)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.04.ddl.sqlpp
new file mode 100644
index 0000000..117fbde
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.04.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
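+/* Negative case: "quote" must be a single character; "ABCD" should be
+ * rejected.
+ */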
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv-error-checks3")
+AS (id bigint)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "quote": "ABCD"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.05.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.05.ddl.sqlpp
new file mode 100644
index 0000000..aed1ed4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.05.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
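+/* Negative case: "delimiter" must be a single character; "wrongDelimiter"
+ * should be rejected.
+ */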
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv-error-checks3")
+AS (id bigint)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "delimiter": "wrongDelimiter"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.06.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.06.ddl.sqlpp
new file mode 100644
index 0000000..e51644b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.06.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
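+/* Negative case: "escape" must be a single character; "wrongEscape"
+ * should be rejected.
+ */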
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv-error-checks3")
+AS (id bigint)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "escape": "wrongEscape"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.07.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.07.ddl.sqlpp
new file mode 100644
index 0000000..2509d18
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.07.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
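+/* Negative case: "ABCD" is presumably rejected as an invalid record
+ * delimiter.
+ */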
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv-error-checks3")
+AS (id bigint)
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "record-delimiter": "ABCD"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.08.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.08.ddl.sqlpp
new file mode 100644
index 0000000..a995d7a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.08.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
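+/* Negative case: the TYPE record nests an object (and an array) under
+ * "name", which cannot be represented in flat CSV output.
+ */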
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv-error-checks3")
+TYPE ( { id : int,  name : { first :  [ string ] } } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "record-delimiter": ","
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.09.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.09.ddl.sqlpp
new file mode 100644
index 0000000..e2d7b4f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.09.ddl.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
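+/* Negative case: "name" is declared as an array, which flat CSV output
+ * cannot represent.
+ */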
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv-error-checks3")
+TYPE ( { id : int,  name : [ string ] } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "record-delimiter": ","
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.10.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.10.ddl.sqlpp
new file mode 100644
index 0000000..8706be3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/csv-error-checks/csv-error-checks.10.ddl.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
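+/* Negative case: the TYPE record declares id as int while the AS clause
+ * declares it as string; the conflicting declarations should be rejected.
+ */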
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "csv-error-checks3")
+TYPE ( { id : int,  name : string } )
+AS ( id string )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"csv",
+    "record-delimiter": ","
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.01.ddl.sqlpp
new file mode 100644
index 0000000..0bfc4eea
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.01.ddl.sqlpp
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+USE test;
+
+
+CREATE TYPE ColumnType1 AS {
+  id: int
+};
+
+CREATE COLLECTION TestCollection(ColumnType1) PRIMARY KEY id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.03.update.sqlpp
new file mode 100644
index 0000000..8eb14c0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.03.update.sqlpp
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+INSERT INTO TestCollection({"id":2, "name": "John", "nested": { "first": "john", "second": "JOHN" } });
+
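+/* Negative case: the inserted record has a numeric id, but the declared
+ * TYPE says id : string, so the writer should report a type mismatch.
+ */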
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks3")
+TYPE ( { id : string, name : string, nested : { first : string, second : string } } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.04.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.04.update.sqlpp
new file mode 100644
index 0000000..5e8d730
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.04.update.sqlpp
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+
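+/* Negative case: the declared TYPE omits nested.second, which is present
+ * in the data inserted by the previous script.
+ */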
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks4")
+TYPE ( { id : bigint, name : string, nested : { first : string } } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.05.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.05.update.sqlpp
new file mode 100644
index 0000000..cda2519
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.05.update.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
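+/* Negative case: "nested" is declared as a string but the data holds an
+ * object.
+ */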
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks5")
+TYPE ( { id : bigint, name : string, nested : string } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.06.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.06.update.sqlpp
new file mode 100644
index 0000000..a6db11d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.06.update.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
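+/* Negative case: "name" is declared as an object but the data holds a
+ * string.
+ */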
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks6")
+TYPE ( { id : bigint, name : { first : string }, nested : { first : string, second : string } } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.07.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.07.update.sqlpp
new file mode 100644
index 0000000..851559a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.07.update.sqlpp
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
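+/* Negative case: "row-group-size" must be a size value; "random" should
+ * be rejected.
+ */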
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks7")
+TYPE ( {id:int} )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet",
+    "row-group-size":"random"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.08.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.08.update.sqlpp
new file mode 100644
index 0000000..2f356fb
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.08.update.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
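+/* Negative case: "page-size" must be a size value; "random" should be
+ * rejected.
+ */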
+COPY (
+   select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks8")
+TYPE ( {id:int} )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet",
+    "page-size":"random"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.09.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.09.update.sqlpp
new file mode 100644
index 0000000..f2293e3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.09.update.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
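+-- Negative test: "rar" is not a supported Parquet compression scheme.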
+COPY (
+   SELECT c.* FROM TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks9")
+TYPE ( { name:string } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet",
+    "compression":"rar"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.10.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.10.update.sqlpp
new file mode 100644
index 0000000..4f52164
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.10.update.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
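+-- Negative test: presumably fails on the malformed inline "schema" value.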
+COPY (
+   SELECT c.* FROM TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks10")
+TYPE ( { name: } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet",
+    "schema":"message schema{"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.11.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.11.update.sqlpp
new file mode 100644
index 0000000..ca2cfc1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.11.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
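+-- Negative test: presumably fails because "binary" is not a supported Parquet field type.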
+COPY (
+   SELECT c.* FROM TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks11")
+TYPE ( { id: int, name: binary } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.12.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.12.update.sqlpp
new file mode 100644
index 0000000..86e55e5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.12.update.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
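+-- Negative test: the Parquet "version" option accepts 1 or 2; 3 should be rejected.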
+COPY (
+   SELECT c.* FROM TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks12")
+TYPE ( { id: int, name: string } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet",
+    "version" : 3
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.13.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.13.update.sqlpp
new file mode 100644
index 0000000..f5d7eda
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.13.update.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
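+-- Negative test: "max-schemas" expects an integer; "yvghc" should be rejected.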
+COPY (
+   SELECT c.* FROM TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks13")
+TYPE ( { id: int, name: string, list: [string] } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet",
+    "max-schemas" : "yvghc"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.14.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.14.update.sqlpp
new file mode 100644
index 0000000..7628044
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.14.update.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
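+-- Negative test: presumably "max-schemas" of 15 falls outside the allowed range.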
+COPY (
+   SELECT c.* FROM TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks14")
+TYPE ( { id: int, name: string, l: int } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet",
+    "max-schemas" : "15"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.15.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.15.update.sqlpp
new file mode 100644
index 0000000..89bfb2a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.15.update.sqlpp
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+INSERT INTO TestCollection({"id":1, "name": []});
+
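+-- Negative test: with no TYPE given, the element type of the empty array presumably cannot be inferred.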
+COPY (
+   SELECT c.* FROM TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks15")
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet"
+}
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.16.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.16.update.sqlpp
new file mode 100644
index 0000000..ad4fc73
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.16.update.sqlpp
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+INSERT INTO TestCollection({"id":10, "name": 1 });
+INSERT INTO TestCollection({"id":12, "name": [1] });
+INSERT INTO TestCollection({"id":15, "name": [[1]] });
+INSERT INTO TestCollection({"id":16, "name": [[[1]]] });
+INSERT INTO TestCollection({"id":17, "name": [[[[1]]]] });
+INSERT INTO TestCollection({"id":18, "name": [[[[[1]]]]] });
+INSERT INTO TestCollection({"id":19, "name": [[[[[[1]]]]]] });
+
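+-- Negative test: the heterogeneous "name" values should exceed the "max-schemas" limit of 2.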
+COPY (
+   SELECT id, name FROM TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks16")
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet",
+    "max-schemas" : "2"
+}
+
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/supported-adapter-format-compression/supported-adapters.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/supported-adapter-format-compression/supported-adapters.03.update.sqlpp
index 2e227d6..a9fe8cf 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/supported-adapter-format-compression/supported-adapters.03.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/supported-adapter-format-compression/supported-adapters.03.update.sqlpp
@@ -28,7 +28,7 @@
     "region":"us-west-2",
     "serviceEndpoint":"http://127.0.0.1:8001",
     "container":"playground",
-    "format":"csv"
+    "format":"avro"
 }
 
 
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.01.ddl.sqlpp
new file mode 100644
index 0000000..56e79c8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.01.ddl.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+
+CREATE TYPE ColumnType1 AS {
+  id: integer,
+  name: string
+};
+
+CREATE COLLECTION TestCollection(ColumnType1) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.02.update.sqlpp
new file mode 100644
index 0000000..ec1ac0c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.02.update.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : insert a record covering date, time, datetime, boolean, and double values
+ * Expected Res : Success
+ */
+
+USE test;
+
+insert into TestCollection({"id":18, "name": "Virat" , "dateType":date("1988-11-05"), "timeType": time("03:10:00.493Z") , "boolType" : false , "doubleType" : 0.75, "datetimeType" : datetime("1900-02-01T00:00:00") });
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.03.update.sqlpp
new file mode 100644
index 0000000..0012e22
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.03.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+   SELECT c.* FROM TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-cover-data-types")
+TYPE ( { name: string, id: int, dateType: date, timeType: time, boolType: boolean, doubleType: double, datetimeType: datetime } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet"
+};
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.04.ddl.sqlpp
new file mode 100644
index 0000000..310198e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.04.ddl.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
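+-- ColumnType2 is an empty open type; fields come from the Parquet files at read time.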
+CREATE TYPE ColumnType2 AS {
+};
+
+
+
+CREATE EXTERNAL DATASET TestDataset(ColumnType2) USING S3
+(
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("region"="us-west-2"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/parquet-cover-data-types/"),
+  ("include"="*.parquet"),
+  ("requireVersionChangeDetection"="false"),
+  ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.05.query.sqlpp
new file mode 100644
index 0000000..b03fc5e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM TestDataset c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.01.ddl.sqlpp
new file mode 100644
index 0000000..6be9489
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.01.ddl.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+
+CREATE TYPE ColumnType1 AS {
+    id: integer
+};
+
+CREATE COLLECTION TestCollection(ColumnType1) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.02.update.sqlpp
new file mode 100644
index 0000000..d569ee5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.02.update.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
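+
+-- One record with a populated nested array, one with an empty "first" array.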
+INSERT INTO TestCollection {"id":2,"name":{"first":["power","star"]}};
+INSERT INTO TestCollection {"id":5,"name":{"first":[]}};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.03.update.sqlpp
new file mode 100644
index 0000000..9a652d2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.03.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
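+-- The explicit TYPE pins "first" to [string], so the empty array still gets a concrete element type.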
+COPY (
+   SELECT c.* FROM TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-empty-array")
+TYPE ( { id: int, name: { first: [string] } } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet"
+};
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.04.ddl.sqlpp
new file mode 100644
index 0000000..f17c4d4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.04.ddl.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+CREATE TYPE ColumnType2 AS {
+};
+
+
+
+CREATE EXTERNAL DATASET TestDataset(ColumnType2) USING S3
+(
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("region"="us-west-2"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/parquet-empty-array/"),
+  ("include"="*.parquet"),
+  ("requireVersionChangeDetection"="false"),
+  ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.05.query.sqlpp
new file mode 100644
index 0000000..b03fc5e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM TestDataset c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.01.ddl.sqlpp
new file mode 100644
index 0000000..00d5c49
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.01.ddl.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+CREATE TYPE ColumnType1 AS {
+  id: int
+};
+
+
+CREATE COLLECTION TestCollection(ColumnType1) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.02.update.sqlpp
new file mode 100644
index 0000000..308d3b9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.02.update.sqlpp
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+-- INSERT INTO TestCollection( {"id":2,"name":{"first":"virat"}});
+-- INSERT INTO TestCollection( {"id":5,"name":{"first":"virat"}});
+-- INSERT INTO TestCollection( {"id":8,"name":{"first":"virat"}});
+-- INSERT INTO TestCollection( {"id":10,"name":{"first":"virat"}});
+-- INSERT INTO TestCollection( {"id":12,"name":{"first":"virat"}});
+-- INSERT INTO TestCollection( {"id":15,"name":{"first":"virat"}});
+-- INSERT INTO TestCollection( {"id":17,"name":{"first":"virat"}});
+-- INSERT INTO TestCollection( {"id":20,"name":{"first":"virat"}});
+-- INSERT INTO TestCollection( {"id":21,"name":{"first":"virat"}});
+-- INSERT INTO TestCollection( {"id":27,"name":{"first":"virat"}});
+-- INSERT INTO TestCollection( {"id":28,"name":{"first":"virat"}});
+
+
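+-- Heterogeneous "name" values: absent, string, object, number, and nested arrays of each.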
+insert into TestCollection({"id":2});
+insert into TestCollection({"id":5,"name":"virat"});
+insert into TestCollection({"id":8,"name":{"first":"virat"}});
+insert into TestCollection({"id":10,"name":{"first":"virat"},"age":18});
+insert into TestCollection({"id":12,"name":123});
+insert into TestCollection({"id":15,"name":[123,456]});
+insert into TestCollection({"id":17,"name":765});
+insert into TestCollection({"id":20,"name":[789]});
+insert into TestCollection({"id":21,"name":[{"first":"virat"}]});
+insert into TestCollection({"id":27,"name":[{"first":"virat","second":"kohli"}]});
+insert into TestCollection({"id":28,"name":{"first":"virat"}});
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.03.update.sqlpp
new file mode 100644
index 0000000..a7e6e16
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.03.update.sqlpp
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
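+-- No TYPE clause: the writer infers the Parquet schema from the data.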
+COPY (
+   SELECT c.* FROM TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-heterogeneous")
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet"
+};
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.04.ddl.sqlpp
new file mode 100644
index 0000000..95f75c6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.04.ddl.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+CREATE TYPE ColumnType2 AS {
+};
+
+
+
+CREATE EXTERNAL DATASET TestDataset(ColumnType2) USING S3
+(
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("region"="us-west-2"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/parquet-heterogeneous/"),
+  ("include"="*.parquet"),
+  ("requireVersionChangeDetection"="false"),
+  ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.05.query.sqlpp
new file mode 100644
index 0000000..3d26c18
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-heterogeneous/parquet-heterogeneous.05.query.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM TestDataset c
+ORDER BY c.id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.01.ddl.sqlpp
new file mode 100644
index 0000000..2155b4e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.01.ddl.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+CREATE TYPE ColumnType1 AS {
+    id: int
+};
+
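+-- "column" selects AsterixDB's columnar storage format for this dataset.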
+CREATE DATASET PartitionParquetDataset(ColumnType1)
+PRIMARY KEY id WITH {
+    "storage-format": {"format" : "column"}
+};
+
+
+CREATE TYPE ColumnType2 AS {
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.02.update.sqlpp
new file mode 100644
index 0000000..4de223e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.02.update.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+LOAD DATASET PartitionParquetDataset USING localfs
+(
+    ("path" = "asterix_nc1://data/hdfs/parquet/partition_heterogeneous.json"),
+    ("format" = "json")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.03.update.sqlpp
new file mode 100644
index 0000000..948ba17
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.03.update.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
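+-- Each distinct partitioner_key value is written under its own output path.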
+COPY (
+   SELECT t.* FROM PartitionParquetDataset t
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-partition-heterogeneous" , partitioner_key , "random" )
+OVER ( PARTITION BY toWriter.partitioner_key AS partitioner_key )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet",
+    "max-schemas" : "10"
+};
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.04.ddl.sqlpp
new file mode 100644
index 0000000..c626270
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.04.ddl.sqlpp
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+
+
+CREATE EXTERNAL DATASET PartitionParquetDatasetCopy(ColumnType2) USING S3
+(
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("region"="us-west-2"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/parquet-partition-heterogeneous/"),
+  ("include"="*.parquet"),
+  ("requireVersionChangeDetection"="false"),
+  ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.05.query.sqlpp
new file mode 100644
index 0000000..7009f98
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM PartitionParquetDatasetCopy c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.01.ddl.sqlpp
new file mode 100644
index 0000000..76970a5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.01.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+CREATE TYPE ColumnType2 AS {
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.02.update.sqlpp
new file mode 100644
index 0000000..4bec537
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.02.update.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
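+-- "version" "2" selects the Parquet format version to write.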
+COPY (
+   select "123" as id
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-simple")
+TYPE ( {id:string} )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet",
+    "version" : "2"
+};
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.03.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.03.ddl.sqlpp
new file mode 100644
index 0000000..a5d7789
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.03.ddl.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+CREATE EXTERNAL DATASET DatasetCopy(ColumnType2) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("region"="us-west-2"),
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/parquet-simple"),
+  ("format" = "parquet"),
+  ("requireVersionChangeDetection"="false"),
+  ("include"="*.parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.04.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.04.query.sqlpp
new file mode 100644
index 0000000..5aeedb8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.04.query.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id
+FROM DatasetCopy c
+ORDER BY c.id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.01.ddl.sqlpp
new file mode 100644
index 0000000..f890e0d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.01.ddl.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+CREATE TYPE ColumnType1 AS {
+    id: string
+};
+
+CREATE DATASET DummyTweetDataset(ColumnType1)
+PRIMARY KEY id WITH {
+    "storage-format": {"format" : "column"}
+};
+
+
+CREATE TYPE ColumnType2 AS {
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.02.update.sqlpp
new file mode 100644
index 0000000..83a1140
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.02.update.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+LOAD DATASET DummyTweetDataset USING localfs
+(
+    ("path" = "asterix_nc1://data/hdfs/parquet/dummy_tweet.json"),
+    ("format" = "json")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.03.update.sqlpp
new file mode 100644
index 0000000..aed4e09
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.03.update.sqlpp
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+   SELECT c.* FROM DummyTweetDataset c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-tweet")
+TYPE ( {
+               coordinates: {
+                 coordinates: [
+                   double
+                 ],
+                 `type` : string
+               },
+               created_at: string,
+               entities: {
+                 urls: [
+                   {
+                     display_url: string,
+                     expanded_url: string,
+                     indices: [
+                       int
+                     ],
+                     url: string
+                   }
+                 ],
+                 user_mentions: [
+                   {
+                     id: int,
+                     id_str: string,
+                     indices: [
+                       int
+                     ],
+                     name: string,
+                     screen_name: string
+                   }
+                 ]
+               },
+               favorite_count: int,
+               favorited: boolean,
+               filter_level: string,
+               geo: {
+                 coordinates: [
+                   double
+                 ],
+                 `type`: string
+               },
+               id: string,
+               id_str: string,
+               in_reply_to_screen_name: string,
+               in_reply_to_status_id: int,
+               in_reply_to_status_id_str: string,
+               in_reply_to_user_id: int,
+               in_reply_to_user_id_str: string,
+               is_quote_status: boolean,
+               lang: string,
+               place: {
+                 bounding_box: {
+                   coordinates: [
+                     [
+                       [
+                         double
+                       ]
+                     ]
+                   ],
+                   `type`: string
+                 },
+                 country: string,
+                 country_code: string,
+                 full_name: string,
+                 id: string,
+                 name: string,
+                 place_type: string,
+                 url: string
+               },
+               possibly_sensitive: boolean,
+               quoted_status: {
+                 created_at: string,
+                 entities: {
+                   user_mentions: [
+                     {
+                       id: int,
+                       id_str: string,
+                       indices: [
+                         int
+                       ],
+                       name: string,
+                       screen_name: string
+                     }
+                   ]
+                 },
+                 favorite_count: int,
+                 favorited: boolean,
+                 filter_level: string,
+                 id: int,
+                 id_str: string,
+                 in_reply_to_screen_name: string,
+                 in_reply_to_status_id: int,
+                 in_reply_to_status_id_str: string,
+                 in_reply_to_user_id: int,
+                 in_reply_to_user_id_str: string,
+                 is_quote_status: boolean,
+                 lang: string,
+                 retweet_count: int,
+                 retweeted: boolean,
+                 source: string,
+                 text: string,
+                 truncated: boolean,
+                 user: {
+                   contributors_enabled: boolean,
+                   created_at: string,
+                   default_profile: boolean,
+                   default_profile_image: boolean,
+                   description: string,
+                   favourites_count: int,
+                   followers_count: int,
+                   friends_count: int,
+                   geo_enabled: boolean,
+                   id: int,
+                   id_str: string,
+                   is_translator: boolean,
+                   lang: string,
+                   listed_count: int,
+                   name: string,
+                   profile_background_color: string,
+                   profile_background_image_url: string,
+                   profile_background_image_url_https: string,
+                   profile_background_tile: boolean,
+                   profile_banner_url: string,
+                   profile_image_url: string,
+                   profile_image_url_https: string,
+                   profile_link_color: string,
+                   profile_sidebar_border_color: string,
+                   profile_sidebar_fill_color: string,
+                   profile_text_color: string,
+                   profile_use_background_image: boolean,
+                   protected: boolean,
+                   screen_name: string,
+                   statuses_count: int,
+                   verified: boolean
+                 }
+               },
+               quoted_status_id: int,
+               quoted_status_id_str: string,
+               retweet_count: int,
+               retweeted: boolean,
+               source: string,
+               text: string,
+               timestamp_ms: string,
+               truncated: boolean,
+               user: {
+                 contributors_enabled: boolean,
+                 created_at: string,
+                 default_profile: boolean,
+                 default_profile_image: boolean,
+                 description: string,
+                 favourites_count: int,
+                 followers_count: int,
+                 friends_count: int,
+                 geo_enabled: boolean,
+                 id: int,
+                 id_str: string,
+                 is_translator: boolean,
+                 lang: string,
+                 listed_count: int,
+                 location: string,
+                 name: string,
+                 profile_background_color: string,
+                 profile_background_image_url: string,
+                 profile_background_image_url_https: string,
+                 profile_background_tile: boolean,
+                 profile_banner_url: string,
+                 profile_image_url: string,
+                 profile_image_url_https: string,
+                 profile_link_color: string,
+                 profile_sidebar_border_color: string,
+                 profile_sidebar_fill_color: string,
+                 profile_text_color: string,
+                 profile_use_background_image: boolean,
+                 protected: boolean,
+                 screen_name: string,
+                 statuses_count: int,
+                 time_zone: string,
+                 url: string,
+                 utc_offset: int,
+                 verified: boolean
+               }
+             } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet"
+};
+
+
+
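The TYPE clause above fixes the Parquet schema the writer emits for the open tweet records; the next file defines an external dataset over the written output. A hedged spot-check of the nested mapping, not part of the numbered test files, assuming DummyTweetDatasetCopy from the following DDL has been created:

USE test;

-- Read a couple of nested fields back through the external dataset to
-- confirm the nested TYPE above round-tripped into the Parquet schema.
SELECT c.user.screen_name, c.place.bounding_box.`type`
FROM DummyTweetDatasetCopy c
ORDER BY c.id;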
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.04.ddl.sqlpp
new file mode 100644
index 0000000..1cf0c78
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.04.ddl.sqlpp
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+
+
+CREATE EXTERNAL DATASET DummyTweetDatasetCopy(ColumnType2) USING S3
+(
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("region"="us-west-2"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/parquet-tweet/"),
+  ("include"="*.parquet"),
+  ("requireVersionChangeDetection"="false"),
+  ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.05.query.sqlpp
new file mode 100644
index 0000000..13587f6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM DummyTweetDatasetCopy c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.01.ddl.sqlpp
new file mode 100644
index 0000000..dfc64ce
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.01.ddl.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+CREATE TYPE ColumnType1 AS {
+    id: int
+};
+
+CREATE DATASET NameCommentDataset(ColumnType1)
+PRIMARY KEY id WITH {
+    "storage-format": {"format" : "column"}
+};
+
+
+CREATE TYPE ColumnType2 AS {
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.02.update.sqlpp
new file mode 100644
index 0000000..8591369
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.02.update.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+LOAD DATASET NameCommentDataset USING localfs
+(
+    ("path" = "asterix_nc1://data/hdfs/parquet/id_name_comment.json"),
+    ("format" = "json")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.03.update.sqlpp
new file mode 100644
index 0000000..9a1c9a4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.03.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+   SELECT c.* FROM NameCommentDataset c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-utf8")
+TYPE ( { comment:string, id:bigint, name:string } )
+WITH {
+    "accessKeyId":"dummyAccessKey",
+    "secretAccessKey":"dummySecretKey",
+    "region":"us-west-2",
+    "serviceEndpoint":"http://127.0.0.1:8001",
+    "container":"playground",
+    "format":"parquet"
+};
+
+
+
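Because this variant targets UTF-8 handling, the TYPE above is kept flat so that any mismatch surfaces in string encoding rather than in nesting. A cheap hedged first check against the copy created in the next step (NameCommentDatasetCopy comes from that DDL; the count itself is not asserted by the numbered files):

USE test;

-- A row count is a quick sanity check before the record-by-record
-- comparison in the .05 query; mangled UTF-8 would surface there instead.
SELECT COUNT(*) AS cnt FROM NameCommentDatasetCopy;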
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.04.ddl.sqlpp
new file mode 100644
index 0000000..4fd41f6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.04.ddl.sqlpp
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+
+
+CREATE EXTERNAL DATASET NameCommentDatasetCopy(ColumnType2) USING S3
+(
+  ("serviceEndpoint"="http://127.0.0.1:8001"),
+  ("region"="us-west-2"),
+  ("container"="playground"),
+  ("definition"="copy-to-result/parquet-utf8/"),
+  ("include"="*.parquet"),
+  ("requireVersionChangeDetection"="false"),
+  ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.05.query.sqlpp
new file mode 100644
index 0000000..17cd027
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM NameCommentDatasetCopy c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/datatype/primitive.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/datatype/primitive.02.update.sqlpp
index 3ab3bc6..6bc1564 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/datatype/primitive.02.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/datatype/primitive.02.update.sqlpp
@@ -27,7 +27,13 @@
 {"id": 129, "myGeometry": st_geom_from_geojson({"type": "MultiPolygon","coordinates": [[[[40, 40], [20, 45], [45, 30], [40, 40]]],[[[20, 35], [10, 30], [10, 10], [30, 5], [45, 20], [20, 35]],[[30, 20], [20, 15], [20, 25], [30, 20]]]]})},
 {"id": 130, "myGeometry": st_make_point(-71.1043443253471, 42.3150676015829)},
 {"id": 131, "myGeometry": st_make_point(1.0,2.0,3.0)},
-{"id": 132, "myGeometry": st_make_point(1.0,2.0,3.0,4.0)},
+/* The record below is intentionally commented out because the JTS WKBWriter does not support 4-dimensional coordinates (https://github.com/locationtech/jts/issues/733). If it is uncommented, add the assertions below to the files mentioned:
+line 9 of result.03.adm should have the result { "Geometries": { "id": 132, "myGeometry": {"type":"Point","coordinates":[1,2,3,4]} } }
+line 9 of result.04.adm should have the result { "Type": "Point", "Area": 0.0, "Coordinate dimension": 4, "Dimension": 0, "NPoints": 1, "XMax": 1.0, "XMin": 1.0, "YMax": 2.0, "YMin": 2.0, "Binary": hex("01B90B0000000000000000F03F000000000000004000000000000008400000000000001040"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[1,2,3,4]}", "WKT": "POINT ZM (1 2 3 4)" }
+line 4 of result.05.adm should have the result { "X": 1.0, "Y": 2.0, "Z": 3.0, "M": 4.0 }
+line 9 of result.09.adm should have the result { "IsClosed": true, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
+*/
+/* {"id": 132, "myGeometry": st_make_point(1.0,2.0,3.0,4.0)}, */
 {"id": 133, "myGeometry": st_geom_from_text('POLYGON((743238 2967416,743238 2967450,743265 2967450,743265.625 2967416,743238 2967416))')},
 {"id": 134, "myGeometry": st_geom_from_wkb(hex("0102000000020000001F85EB51B87E5CC0D34D621058994340105839B4C87E5CC0295C8FC2F5984340"))},
 {"id": 135, "myGeometry": st_line_from_multipoint(st_geom_from_text('MULTIPOINT(1 2 , 4 5 , 7 8 )'))},
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/index/index.19.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/index/index.19.update.sqlpp
index bdfef62..02d5064 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/index/index.19.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/index/index.19.update.sqlpp
@@ -27,7 +27,11 @@
 {"id": 129, "myGeometry": st_geom_from_geojson({"type": "MultiPolygon","coordinates": [[[[40, 40], [20, 45], [45, 30], [40, 40]]],[[[20, 35], [10, 30], [10, 10], [30, 5], [45, 20], [20, 35]],[[30, 20], [20, 15], [20, 25], [30, 20]]]]})},
 {"id": 130, "myGeometry": st_make_point(-71.1043443253471, 42.3150676015829)},
 {"id": 131, "myGeometry": st_make_point(1.0,2.0,3.0)},
-{"id": 132, "myGeometry": st_make_point(1.0,2.0,3.0,4.0)},
+/* The record below is intentionally commented out because the JTS WKBWriter does not support 4-dimensional coordinates (https://github.com/locationtech/jts/issues/733). If it is uncommented, add the assertions below to the files mentioned:
+line 9 of result.20.adm should have the result { "Geometries": { "id": 132, "myGeometry": {"type":"Point","coordinates":[1,2,3,4]} } }
+line 2 of result.21.adm should have the result 132
+*/
+/* {"id": 132, "myGeometry": st_make_point(1.0,2.0,3.0,4.0)}, */
 {"id": 133, "myGeometry": st_geom_from_text('POLYGON((743238 2967416,743238 2967450,743265 2967450,743265.625 2967416,743238 2967416))')},
 {"id": 134, "myGeometry": st_geom_from_wkb(hex("0102000000020000001F85EB51B87E5CC0D34D621058994340105839B4C87E5CC0295C8FC2F5984340"))},
 {"id": 135, "myGeometry": st_line_from_multipoint(st_geom_from_text('MULTIPOINT(1 2 , 4 5 , 7 8 )'))},
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/single-method/predicate.15.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/single-method/predicate.15.query.sqlpp
new file mode 100644
index 0000000..c1637ce
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/single-method/predicate.15.query.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+st_z_max(st_geom_from_text("POLYGON EMPTY"));
+st_z_min(st_geom_from_text("POLYGON EMPTY"));
+st_z_max(st_geom_from_text("POLYGON Z ((0 0 0, 1 1 0, 1 -1 0, 0 0 0))"));
+st_z_min(st_geom_from_text("POLYGON Z ((0 0 0, 1 1 0, 1 -1 0, 0 0 0))"));
+st_z_max(st_geom_from_text("POLYGON Z ((0 0 1, 1 1 2, 1 -1 3, 0 0 1))"));
+st_z_min(st_geom_from_text("POLYGON Z ((0 0 1, 1 1 2, 1 -1 3, 0 0 1))"));
+st_z_max(st_geom_from_text("POLYGON Z ((0 0 -1, 1 1 -2, 1 -1 -3, 0 0 -1))"));
+st_z_min(st_geom_from_text("POLYGON Z ((0 0 -1, 1 1 -2, 1 -1 -3, 0 0 -1))"));
+st_z_max(st_geom_from_text("POLYGON Z ((0 0 -1, 1 1 0, 1 -1 1, 0 0 -1))"));
+st_z_min(st_geom_from_text("POLYGON Z ((0 0 -1, 1 1 0, 1 -1 1, 0 0 -1))"));
+st_z_max(st_geom_from_text("POLYGON Z ((0 0 0, 1 1 0, 1 -1 2, 0 0 0))"));
+st_z_min(st_geom_from_text("POLYGON Z ((0 0 0, 1 1 0, 1 -1 2, 0 0 0))"));
+st_z_max(st_geom_from_text("POLYGON Z ((0 0 0, 1 1 -1, 1 -1 0, 0 0 0))"));
+st_z_min(st_geom_from_text("POLYGON Z ((0 0 0, 1 1 -1, 1 -1 0, 0 0 0))"));
\ No newline at end of file
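The expected values for the non-empty polygons above can be read directly off the Z coordinates of each ring; a hedged summary follows (POLYGON EMPTY is omitted, since how the runtime reports min/max of an empty geometry is not stated here):

-- Assumed expected pairs, derived by inspecting the Z values of each ring:
-- Z values (0, 0, 0)    -> st_z_max = 0.0,  st_z_min = 0.0
-- Z values (1, 2, 3)    -> st_z_max = 3.0,  st_z_min = 1.0
-- Z values (-1, -2, -3) -> st_z_max = -1.0, st_z_min = -3.0
-- Z values (-1, 0, 1)   -> st_z_max = 1.0,  st_z_min = -1.0
-- Z values (0, 0, 2)    -> st_z_max = 2.0,  st_z_min = 0.0
-- Z values (0, -1, 0)   -> st_z_max = 0.0,  st_z_min = -1.0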
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.01.ddl.sqlpp
new file mode 100644
index 0000000..8a1a1d9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.01.ddl.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+USE test;
+
+CREATE TYPE t1 AS { id: int, name: string, age: int };
+
+CREATE DATASET ds1(t1) PRIMARY KEY id;
+
+CREATE INDEX ds1_name ON ds1(name: string);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.02.update.sqlpp
new file mode 100644
index 0000000..94642af
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.02.update.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+UPSERT INTO ds1 ([
+{"id":1, "name": "John", "age": 21},
+{"id":2, "name": "asd gdh", "age": 23},
+{"id":3, "name": "xossted", "age": 102},
+{"id":4, "name": "asdgdh👩‍👩‍👧dfsd", "age": 23},
+{"id":5, "name": "sxvciis", "age": 13},
+{"id":6, "name": "fsdxv😀", "age": 17},
+{"id":7, "name": "fsdxv😁", "age": 17},
+{"id":8, "name": "Join", "age": 21},
+{"id":9, "name": "sxvfjshfjisciis", "age": 13},
+{"id":10, "name": "syvgjshfjisciis", "age": 123}
+]);
\ No newline at end of file
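The queries that follow come in pairs: each LIKE predicate runs once for its result set and once under explain to check the plan against the ds1_name index. As a hedged rule of thumb, not asserted anywhere in this change: a pattern with a literal prefix before the first wildcard can be rewritten into a range search on the index, while a pattern that begins with % or _ offers no usable prefix. An illustrative negative case, not one of the numbered files:

use test;

-- A leading wildcard leaves no literal prefix, so the plan from this
-- explain would be expected to show a full scan of ds1 rather than an
-- index search on ds1_name.
explain select * from ds1 where name like "%vciis";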
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.03.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.03.query.sqlpp
new file mode 100644
index 0000000..c93433e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.03.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+select * from ds1 where name like "fsdxv😀%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.04.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.04.query.sqlpp
new file mode 100644
index 0000000..94b9911
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.04.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+explain select * from ds1 where name like "fsdxv😀%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.05.query.sqlpp
new file mode 100644
index 0000000..05f4fcb
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.05.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+select * from ds1 where name like "asdgdh👩‍👩‍👧%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.06.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.06.query.sqlpp
new file mode 100644
index 0000000..712f952
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.06.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+explain select * from ds1 where name like "asdgdh👩‍👩‍👧%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.07.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.07.query.sqlpp
new file mode 100644
index 0000000..e196c9c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.07.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+select * from ds1 where name like "sxvciis";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.08.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.08.query.sqlpp
new file mode 100644
index 0000000..8727844
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.08.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+explain select * from ds1 where name like "sxvciis";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.09.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.09.query.sqlpp
new file mode 100644
index 0000000..9e40b15
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.09.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+select * from ds1 where name like "Joh%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.10.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.10.query.sqlpp
new file mode 100644
index 0000000..c5272bb
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.10.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+explain select * from ds1 where name like "Joh%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.11.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.11.query.sqlpp
new file mode 100644
index 0000000..f4af3ef
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.11.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+select * from ds1 where name like "s%vfjs__jiscii%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.12.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.12.query.sqlpp
new file mode 100644
index 0000000..712538d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.12.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+explain select * from ds1 where name like "s%vfjs__jiscii%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.13.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.13.query.sqlpp
new file mode 100644
index 0000000..a4a194c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.13.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+select * from ds1 where name like "sxvc_is";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.14.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.14.query.sqlpp
new file mode 100644
index 0000000..f191e96
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.14.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+explain select * from ds1 where name like "sxvc_is";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.15.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.15.query.sqlpp
new file mode 100644
index 0000000..bc095fe
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.15.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+select * from ds1 where name like "sxvc_i%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.16.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.16.query.sqlpp
new file mode 100644
index 0000000..15de270
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.16.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+explain select * from ds1 where name like "sxvc_i%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.17.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.17.query.sqlpp
new file mode 100644
index 0000000..c7370c5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.17.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+select * from ds1 where name like "sxvfjs%jisci%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.18.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.18.query.sqlpp
new file mode 100644
index 0000000..915e4ac
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.18.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+explain select * from ds1 where name like "sxvfjs%jisci%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.19.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.19.query.sqlpp
new file mode 100644
index 0000000..81ea8e5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.19.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+select * from ds1 where name like "J%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.20.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.20.query.sqlpp
new file mode 100644
index 0000000..c6f30c2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.20.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+explain select * from ds1 where name like "J%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.21.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.21.query.sqlpp
new file mode 100644
index 0000000..4cddaff
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.21.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+select * from ds1 where name like "sx%";
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.22.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.22.query.sqlpp
new file mode 100644
index 0000000..4a02f4d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/index-selection/like-expression/like-expression.22.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+explain select * from ds1 where name like "sx%";
\ No newline at end of file
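
The four like-expression files above form query/EXPLAIN pairs: .19/.20 run and explain the pattern "J%", and .21/.22 do the same for "sx%", so the expected results pin down both the answers and the chosen plans. A LIKE pattern with a constant prefix is the index-eligible case; as a rough illustration only (the actual rewrite is whatever the checked-in EXPLAIN results assert), such a predicate can be answered like a range scan over the indexed field:

    -- illustrative sketch, not the optimizer's literal output
    SELECT * FROM ds1 WHERE name >= "J" AND name < "K";
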
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.1.ddl.sqlpp
new file mode 100644
index 0000000..4271f99
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.1.ddl.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Description: This test case verifies the fix for ASTERIXDB-3410
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use test;
+
+create type dt1 as {itemid: int};
+
+create dataset collection1(dt1) PRIMARY KEY itemid;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.2.update.sqlpp
new file mode 100644
index 0000000..d805bd0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.2.update.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+insert into collection1
+([
+    {
+       "categories": "Category 1, Category 2, Category 6, Category 9, Category 10, Category 12, Category 13, Category 14, Category 16, Category 17, Category 18, Category 19, Category 20, Category 21, Category 22",
+       "itemid":10,
+       "description":"ABC"
+    },
+    {
+       "categories": "Category 1, Category 3, Category 5, Category 7, Category 8, Category 9, Category 10, Category 19, Category 10, Category 20, Category 21, Category 22, Category 23, Category 31, Category 32",
+       "itemid":12,
+       "description":"XYZ"
+    }
+]);
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.3.query.sqlpp
new file mode 100644
index 0000000..9081a11
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.3.query.sqlpp
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Description: This test case verifies the fix for ASTERIXDB-3410
+ */
+
+use test;
+
+SELECT VALUE OBJECT_REMOVE(t, 'categories')
+FROM (
+  SELECT t.*,
+         to_bigint(CONTAINS(categories, "Category 1")) AS `Category 1`
+  FROM (
+    SELECT t.*,
+           to_bigint(CONTAINS(categories, "Category 2")) AS `Category 2`
+    FROM (
+      SELECT t.*,
+             to_bigint(CONTAINS(categories, "Category 3")) AS `Category 3`
+      FROM (
+        SELECT t.*,
+               to_bigint(CONTAINS(categories, "Category 4")) AS `Category 4`
+        FROM (
+          SELECT t.*,
+                 to_bigint(CONTAINS(categories, "Category 5")) AS `Category 5`
+          FROM (
+            SELECT t.*,
+                   to_bigint(CONTAINS(categories, "Category 6")) AS `Category 6`
+            FROM (
+              SELECT t.*,
+                     to_bigint(CONTAINS(categories, "Category 7")) AS `Category 7`
+              FROM (
+                SELECT t.*,
+                       to_bigint(CONTAINS(categories, "Category 8")) AS `Category 8`
+                FROM (
+                  SELECT t.*,
+                         to_bigint(CONTAINS(categories, "Category 9")) AS `Category 9`
+                  FROM (
+                    SELECT t.*,
+                           to_bigint(CONTAINS(categories, "Category 10")) AS `Category 10`
+                    FROM (
+                      SELECT t.*,
+                             to_bigint(CONTAINS(categories, "Category 11")) AS `Category 11`
+                      FROM (
+                        SELECT t.*,
+                               to_bigint(CONTAINS(categories, "Category 12")) AS `Category 12`
+                        FROM (
+                          SELECT t.*,
+                                 to_bigint(CONTAINS(categories, "Category 13")) AS `Category 13`
+                          FROM (
+                            SELECT t.*,
+                                   to_bigint(CONTAINS(categories, "Category 14")) AS `Category 14`
+                            FROM (
+                              SELECT t.*,
+                                     to_bigint(CONTAINS(categories, "Category 15")) AS `Category 15`
+                              FROM (
+                                SELECT t.*,
+                                       to_bigint(CONTAINS(categories, "Category 16")) AS `Category 16`
+                                FROM (
+                                  SELECT t.*,
+                                         to_bigint(CONTAINS(categories, "Category 17")) AS `Category 17`
+                                  FROM (
+                                    SELECT t.*,
+                                           to_bigint(CONTAINS(categories, "Category 18")) AS `Category 18`
+                                    FROM (
+                                      SELECT t.*,
+                                             to_bigint(CONTAINS(categories, "Category 19")) AS `Category 19`
+                                      FROM (
+                                        SELECT t.*,
+                                               to_bigint(CONTAINS(categories, "Category 20")) AS `Category 20`
+                                        FROM (
+                                          SELECT t.*,
+                                                 to_bigint(CONTAINS(categories, "Category 21")) AS `Category 21`
+                                          FROM (
+                                            SELECT t.*,
+                                                   to_bigint(CONTAINS(categories, "Category 22")) AS `Category 22`
+                                            FROM (
+                                              SELECT t.*,
+                                                     to_bigint(CONTAINS(categories, "Category 23")) AS `Category 23`
+                                              FROM (
+                                                SELECT t.*,
+                                                       to_bigint(CONTAINS(categories, "Category 24")) AS `Category 24`
+                                                FROM (
+                                                  SELECT t.*,
+                                                         to_bigint(CONTAINS(categories, "Category 25")) AS `Category 25`
+                                                  FROM (
+                                                    SELECT t.*,
+                                                           to_bigint(CONTAINS(categories, "Category 26")) AS `Category 26`
+                                                    FROM collection1 t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ORDER BY itemid;
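
The 26-level nesting above reproduces the ASTERIXDB-3410 scenario at full size: each subquery layer adds one CONTAINS-derived column before OBJECT_REMOVE strips the source field. The same pattern, reduced to two levels, reads as follows:

    SELECT VALUE OBJECT_REMOVE(t, 'categories')
    FROM (
      SELECT t.*,
             to_bigint(CONTAINS(categories, "Category 1")) AS `Category 1`
      FROM (
        SELECT t.*,
               to_bigint(CONTAINS(categories, "Category 2")) AS `Category 2`
        FROM collection1 t ) AS t ) AS t ORDER BY itemid;
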
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-does-not-exist/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-does-not-exist/test.040.query.sqlpp
new file mode 100644
index 0000000..dc66c29
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-does-not-exist/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "doesNotExistCollection") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-does-not-exist/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-does-not-exist/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-does-not-exist/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-exists/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-exists/test.010.ddl.sqlpp
new file mode 100644
index 0000000..7544a66
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-exists/test.010.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE testScope IF EXISTS;
+CREATE DATAVERSE testScope;
+USE testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-exists/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-exists/test.020.update.sqlpp
new file mode 100644
index 0000000..711dd3f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-exists/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
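
The storage-size setup files all load their data with range(1, 512), which in SQL++ enumerates the integers from 1 through 512 inclusive, so each populated collection holds 512 small records. A minimal illustration:

    SELECT VALUE intVal FROM range(1, 3) intVal;  -- returns [ 1, 2, 3 ]
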
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-exists/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-exists/test.040.query.sqlpp
new file mode 100644
index 0000000..d4442c7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-exists/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-exists/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-exists/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/collection-exists/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/external-collection/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/external-collection/test.010.ddl.sqlpp
new file mode 100644
index 0000000..5dcc7e0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/external-collection/test.010.ddl.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE testScope IF EXISTS;
+CREATE DATAVERSE testScope;
+USE testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
+
+CREATE TYPE test AS open {};
+CREATE EXTERNAL DATASET testExternalCollection(test) USING S3 (
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("region"="us-west-2"),
+("serviceEndpoint"="http://127.0.0.1:8001"),
+("container"="playground"),
+("definition"="json-data/reviews/single-line/json"),
+("format"="json")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/external-collection/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/external-collection/test.020.update.sqlpp
new file mode 100644
index 0000000..711dd3f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/external-collection/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/external-collection/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/external-collection/test.040.query.sqlpp
new file mode 100644
index 0000000..9915cfd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/external-collection/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testExternalCollection") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/external-collection/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/external-collection/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/external-collection/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.010.ddl.sqlpp
new file mode 100644
index 0000000..7544a66
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.010.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE testScope IF EXISTS;
+CREATE DATAVERSE testScope;
+USE testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.020.update.sqlpp
new file mode 100644
index 0000000..711dd3f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.021.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.021.update.sqlpp
new file mode 100644
index 0000000..24916e4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.021.update.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+
+CREATE INDEX testCollection_idx ON testCollection(some_value: int);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.040.query.sqlpp
new file mode 100644
index 0000000..905a20a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection", "testCollection_idx_fake") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-does-not-exist/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.010.ddl.sqlpp
new file mode 100644
index 0000000..7544a66
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.010.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE testScope IF EXISTS;
+CREATE DATAVERSE testScope;
+USE testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.020.update.sqlpp
new file mode 100644
index 0000000..711dd3f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.021.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.021.update.sqlpp
new file mode 100644
index 0000000..24916e4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.021.update.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+
+CREATE INDEX testCollection_idx ON testCollection(some_value: int);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.040.query.sqlpp
new file mode 100644
index 0000000..cb60cb9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection", "testCollection_idx") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/index-exists/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.010.ddl.sqlpp
new file mode 100644
index 0000000..7544a66
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.010.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE testScope IF EXISTS;
+CREATE DATAVERSE testScope;
+USE testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.020.update.sqlpp
new file mode 100644
index 0000000..711dd3f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.040.query.sqlpp
new file mode 100644
index 0000000..d376541
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.041.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.041.query.sqlpp
new file mode 100644
index 0000000..b395ed5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.041.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection", "testIndex", "tooManyArgs") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/invalid-arguments-count/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-constant-argument/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-constant-argument/test.010.ddl.sqlpp
new file mode 100644
index 0000000..7544a66
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-constant-argument/test.010.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE testScope IF EXISTS;
+CREATE DATAVERSE testScope;
+USE testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-constant-argument/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-constant-argument/test.020.update.sqlpp
new file mode 100644
index 0000000..711dd3f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-constant-argument/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-constant-argument/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-constant-argument/test.040.query.sqlpp
new file mode 100644
index 0000000..2a0260e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-constant-argument/test.040.query.sqlpp
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+SELECT storage_size(v.id, v.id, v.id) FROM testCollection AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-constant-argument/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-constant-argument/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-constant-argument/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-string-argument/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-string-argument/test.010.ddl.sqlpp
new file mode 100644
index 0000000..7544a66
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-string-argument/test.010.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE testScope IF EXISTS;
+CREATE DATAVERSE testScope;
+USE testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-string-argument/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-string-argument/test.020.update.sqlpp
new file mode 100644
index 0000000..711dd3f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-string-argument/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-string-argument/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-string-argument/test.040.query.sqlpp
new file mode 100644
index 0000000..ddb1996
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-string-argument/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size(1, 2, 3) AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-string-argument/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-string-argument/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/non-string-argument/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/sanity/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/sanity/test.010.ddl.sqlpp
new file mode 100644
index 0000000..761ae9f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/sanity/test.010.ddl.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE testScope IF EXISTS;
+CREATE DATAVERSE testScope;
+USE testScope;
+
+CREATE COLLECTION testCollection PRIMARY KEY (id: int);
+
+CREATE INDEX testCollection_idx ON testCollection(someField1: int);
+
+CREATE COLLECTION collectionAndIndexes PRIMARY KEY (id: int);
+CREATE COLLECTION collectionOnly PRIMARY KEY (id: int);
+CREATE COLLECTION indexOnly PRIMARY KEY (id: int);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/sanity/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/sanity/test.020.update.sqlpp
new file mode 100644
index 0000000..44905ff
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/sanity/test.020.update.sqlpp
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {
+    "id": intVal,
+    "someField1": intVal,
+    "someField2": uuid(),
+    "someField3": {"firstName": uuid(), "lastName": uuid(), "location": {"lon": intVal * 2 + intVal, "lat": intVal * 7 + intVal}}
+    }
+    FROM range(1, 100000) intVal
+);
+
+INSERT INTO collectionAndIndexes({"id": 1, "valueField": (select value size from storage_size("testDatabase", "testScope", "testCollection") as x)[0]});
+INSERT INTO collectionOnly({"id": 1, "valueField": (select value size from storage_size("testDatabase", "testScope", "testCollection", "testCollection") as x)[0]});
+INSERT INTO indexOnly({"id": 1, "valueField": (select value size from storage_size("testDatabase", "testScope", "testCollection", "testCollection_idx") as x)[0]});
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/sanity/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/sanity/test.040.query.sqlpp
new file mode 100644
index 0000000..4ad1a6e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/sanity/test.040.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+
+SELECT a.valueField > b.valueField AS collectionAndIndexesLargerThanCollectionOnly, b.valueField > c.valueField AS collectionOnlyLargerThanIndexOnly
+FROM collectionAndIndexes a,
+     collectionOnly b,
+     indexOnly c
+WHERE a.id = b.id
+  AND a.id = c.id
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/sanity/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/sanity/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/sanity/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/view-not-collection/test.010.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/view-not-collection/test.010.ddl.sqlpp
new file mode 100644
index 0000000..34a3239
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/view-not-collection/test.010.ddl.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE testScope IF EXISTS;
+CREATE DATAVERSE testScope;
+USE testScope;
+
+CREATE COLLECTION testCollection
+PRIMARY KEY (id: int);
+
+CREATE VIEW testCollection_vw(id int)
+DEFAULT NULL datetime "YYYY-MM-DD hh:mm:ss" AS
+SELECT t.id
+FROM testCollection t;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/view-not-collection/test.020.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/view-not-collection/test.020.update.sqlpp
new file mode 100644
index 0000000..711dd3f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/view-not-collection/test.020.update.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE testScope;
+
+INSERT INTO testCollection(
+    SELECT VALUE {"id": intVal, "some_value": intVal}
+    FROM range(1, 512) intVal
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/view-not-collection/test.040.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/view-not-collection/test.040.query.sqlpp
new file mode 100644
index 0000000..cd88a1e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/view-not-collection/test.040.query.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+SELECT VALUE v FROM storage_size("testDatabase", "testScope", "testCollection_vw") AS v;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/view-not-collection/test.999.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/view-not-collection/test.999.ddl.sqlpp
new file mode 100644
index 0000000..10b92b4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/storage-size/datasource-function/view-not-collection/test.999.ddl.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATABASE testDatabase IF EXISTS;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_01/countn_01.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_01/countn_01.1.adm
new file mode 100644
index 0000000..00750ed
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_01/countn_01.1.adm
@@ -0,0 +1 @@
+3
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_02/countn_02.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_02/countn_02.1.adm
new file mode 100644
index 0000000..565b903
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_02/countn_02.1.adm
@@ -0,0 +1,7 @@
+3
+2
+1
+1
+0
+0
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_distinct/countn_distinct.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_distinct/countn_distinct.1.adm
new file mode 100644
index 0000000..c1f0a1c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_distinct/countn_distinct.1.adm
@@ -0,0 +1 @@
+{ "t1": 3, "t2": 0, "t3": 2, "t4": 3, "t5": 0 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_empty_01/countn_empty_01.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_empty_01/countn_empty_01.1.adm
new file mode 100644
index 0000000..c227083
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_empty_01/countn_empty_01.1.adm
@@ -0,0 +1 @@
+0
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_empty_02/countn_empty_02.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_empty_02/countn_empty_02.1.adm
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_empty_02/countn_empty_02.1.adm
@@ -0,0 +1 @@
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_null/countn_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_null/countn_null.1.adm
new file mode 100644
index 0000000..3c4c7ec
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_null/countn_null.1.adm
@@ -0,0 +1 @@
+{ "sql-count": 3 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn/scalar_count.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn/scalar_count.1.adm
new file mode 100644
index 0000000..3230e04
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn/scalar_count.1.adm
@@ -0,0 +1,7 @@
+3
+3
+3
+3
+3
+3
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn_empty/scalar_countn_empty.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn_empty/scalar_countn_empty.1.adm
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn_empty/scalar_countn_empty.1.adm
@@ -0,0 +1 @@
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn_null/scalar_countn_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn_null/scalar_countn_null.1.adm
new file mode 100644
index 0000000..b4f89e0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn_null/scalar_countn_null.1.adm
@@ -0,0 +1,10 @@
+3
+3
+3
+3
+3
+3
+0
+0
+0
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_01/countn_01.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_01/countn_01.1.adm
new file mode 100644
index 0000000..1d6b377
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_01/countn_01.1.adm
@@ -0,0 +1,4 @@
+3
+3
+{ "$1": 2, "n": "a" }
+{ "$1": 1, "n": "b" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_02/countn_02.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_02/countn_02.1.adm
new file mode 100644
index 0000000..65ede1c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_02/countn_02.1.adm
@@ -0,0 +1,7 @@
+3
+null
+1
+null
+null
+0
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_distinct/countn_distinct.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_distinct/countn_distinct.1.adm
new file mode 100644
index 0000000..ae0b195
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_distinct/countn_distinct.1.adm
@@ -0,0 +1 @@
+{ "t1": 3, "t2": null, "t3": null, "t4": 3, "t5": 0 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_empty_01/countn_empty_01.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_empty_01/countn_empty_01.1.adm
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_empty_01/countn_empty_01.1.adm
@@ -0,0 +1 @@
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_empty_02/countn_empty_02.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_empty_02/countn_empty_02.1.adm
new file mode 100644
index 0000000..c227083
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_empty_02/countn_empty_02.1.adm
@@ -0,0 +1 @@
+0
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_null/countn_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_null/countn_null.1.adm
new file mode 100644
index 0000000..5fb8b09
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_null/countn_null.1.adm
@@ -0,0 +1 @@
+{ "count": null }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn/scalar_countn.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn/scalar_countn.1.adm
new file mode 100644
index 0000000..3230e04
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn/scalar_countn.1.adm
@@ -0,0 +1,7 @@
+3
+3
+3
+3
+3
+3
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn_empty/scalar_countn_empty.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn_empty/scalar_countn_empty.1.adm
new file mode 100644
index 0000000..c227083
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn_empty/scalar_countn_empty.1.adm
@@ -0,0 +1 @@
+0
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn_null/scalar_countn_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn_null/scalar_countn_null.1.adm
new file mode 100644
index 0000000..b533851
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn_null/scalar_countn_null.1.adm
@@ -0,0 +1,10 @@
+null
+null
+null
+null
+null
+null
+null
+null
+0
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cache-residency/cache-residency.002.regexjson b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cache-residency/cache-residency.002.regexjson
index b3f8c7c..d478e8a 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cache-residency/cache-residency.002.regexjson
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cache-residency/cache-residency.002.regexjson
@@ -16,6 +16,6 @@
 		"resultSize": "R{.*}",
 		"processedObjects": "R{.*}",
 		"bufferCacheHitRatio": "100.00%",
-		"bufferCachePageReadCount": 1
+		"bufferCachePageReadCount": "R{.*}"
 	}
 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.regexadm
index 52981ae..9d53805 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.regexadm
@@ -41,11 +41,13 @@
     "compiler\.indexonly" : true,
     "compiler\.internal\.sanitycheck" : true,
     "compiler\.joinmemory" : 262144,
+    "compiler\.max\.variable\.occurrences\.inlining" : 128,
     "compiler.min.groupmemory" : 524288,
     "compiler.min.joinmemory" : 524288,
     "compiler\.min\.memory\.allocation" : true,
     "compiler.min.sortmemory" : 524288,
     "compiler.min.windowmemory" : 524288,
+    "compiler.orderfields" : false,
     "compiler\.parallelism" : 0,
     "compiler.queryplanshape" : "zigzag",
     "compiler.runtime.memory.overhead" : 5,
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm
index 0d4967a..0b23562 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm
@@ -41,11 +41,13 @@
     "compiler\.indexonly" : true,
     "compiler\.internal\.sanitycheck" : false,
     "compiler\.joinmemory" : 262144,
+    "compiler\.max\.variable\.occurrences\.inlining" : 128,
     "compiler.min.groupmemory" : 524288,
     "compiler.min.joinmemory" : 524288,
     "compiler\.min\.memory\.allocation" : true,
     "compiler.min.sortmemory" : 524288,
     "compiler.min.windowmemory" : 524288,
+    "compiler.orderfields" : false,
     "compiler\.parallelism" : -1,
     "compiler.queryplanshape" : "zigzag",
     "compiler.runtime.memory.overhead" : 5,
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm
index 0295ab2..0d77d05 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm
@@ -41,11 +41,13 @@
     "compiler\.indexonly" : true,
     "compiler\.internal\.sanitycheck" : false,
     "compiler\.joinmemory" : 262144,
+    "compiler\.max\.variable\.occurrences\.inlining" : 128,
     "compiler.min.groupmemory" : 524288,
     "compiler.min.joinmemory" : 524288,
     "compiler\.min\.memory\.allocation" : true,
     "compiler.min.sortmemory" : 524288,
     "compiler.min.windowmemory" : 524288,
+    "compiler.orderfields" : false,
     "compiler\.parallelism" : 3,
     "compiler.queryplanshape" : "zigzag",
     "compiler.runtime.memory.overhead" : 5,
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.3.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.3.adm
new file mode 100644
index 0000000..0e8c7ab
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.3.adm
@@ -0,0 +1 @@
+{ "t1": 2, "t2": [ 2, 3 ], "t3": null, "t4": null, "t5": null, "t6": [ 3 ], "t7": [ 2, 3, 4 ], "t8": [ 2, 3, 4, 5 ], "t9": null, "t10": null, "t11": [ 0, 1, 2, 3, 4, 5 ], "t12": 0, "t13": [ 0, 1, 2, 3 ], "t14": null, "t15": null, "t16": null, "t17": null, "t18": [ [ 1, 2 ] ], "t19": [ [ 1, 2, 3, 4, 5 ] ], "t20": [ [ 0, 1, 2 ] ], "t21": [ [ 0, 1, 2, 3, 4, 5 ] ], "t22": [ [ 0, 1, 2, 3, 4, 5 ] ] }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/cloud_storage/storage-size/datasource-function/collection-exists/result.040.regexjson b/asterixdb/asterix-app/src/test/resources/runtimets/results/cloud_storage/storage-size/datasource-function/collection-exists/result.040.regexjson
new file mode 100644
index 0000000..1e9e352
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/cloud_storage/storage-size/datasource-function/collection-exists/result.040.regexjson
@@ -0,0 +1,3 @@
+{
+    "size": "R{.*}"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/cloud_storage/storage-size/datasource-function/external-collection/result.002.ignore b/asterixdb/asterix-app/src/test/resources/runtimets/results/cloud_storage/storage-size/datasource-function/external-collection/result.002.ignore
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/cloud_storage/storage-size/datasource-function/external-collection/result.002.ignore
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/cloud_storage/storage-size/datasource-function/index-exists/result.040.regexjson b/asterixdb/asterix-app/src/test/resources/runtimets/results/cloud_storage/storage-size/datasource-function/index-exists/result.040.regexjson
new file mode 100644
index 0000000..1e9e352
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/cloud_storage/storage-size/datasource-function/index-exists/result.040.regexjson
@@ -0,0 +1,3 @@
+{
+    "size": "R{.*}"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/cloud_storage/storage-size/datasource-function/sanity/result.040.regexjson b/asterixdb/asterix-app/src/test/resources/runtimets/results/cloud_storage/storage-size/datasource-function/sanity/result.040.regexjson
new file mode 100644
index 0000000..5cfd541
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/cloud_storage/storage-size/datasource-function/sanity/result.040.regexjson
@@ -0,0 +1 @@
+{"collectionAndIndexesLargerThanCollectionOnly":true,"collectionOnlyLargerThanIndexOnly":true}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.05.adm
new file mode 100644
index 0000000..8fc863e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-cover-data-types/parquet-cover-data-types.05.adm
@@ -0,0 +1 @@
+{ "name": "Virat", "id": 18, "dateType": date("1988-11-05"), "timeType": time("03:10:00.493"), "boolType": false, "doubleType": 0.75, "datetimeType": datetime("1900-02-01T00:00:00.000") }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-empty-array/parquet-empty-array.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-empty-array/parquet-empty-array.05.adm
new file mode 100644
index 0000000..97de7e9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-empty-array/parquet-empty-array.05.adm
@@ -0,0 +1,2 @@
+{ "id": 2, "name": { "first": [ "power", "star" ] } }
+{ "id": 5, "name": { "first": [  ] } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-simple/parquet-simple.04.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-simple/parquet-simple.04.adm
new file mode 100644
index 0000000..bf567b2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-simple/parquet-simple.04.adm
@@ -0,0 +1 @@
+{ "id": "123" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-tweet/parquet-tweet.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-tweet/parquet-tweet.05.adm
new file mode 100644
index 0000000..5e0df96
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-tweet/parquet-tweet.05.adm
@@ -0,0 +1,2 @@
+{ "coordinates": { "coordinates": [ 1.1 ], "type": "string" }, "created_at": "string", "entities": { "urls": [ { "display_url": "string", "expanded_url": "string", "indices": [ 1 ], "url": "string" } ], "user_mentions": [ { "id": 1, "id_str": "string", "indices": [ 1 ], "name": "string", "screen_name": "string" } ] }, "favorite_count": 1, "favorited": true, "filter_level": "string", "geo": { "coordinates": [ 1.1 ], "type": "string" }, "id": "0000000", "id_str": "string", "in_reply_to_screen_name": "string", "in_reply_to_status_id": 1, "in_reply_to_status_id_str": "string", "in_reply_to_user_id": 1, "in_reply_to_user_id_str": "string", "is_quote_status": true, "lang": "string", "place": { "bounding_box": { "coordinates": [ [ [ 1.1 ] ] ], "type": "string" }, "country": "string", "country_code": "string", "full_name": "string", "id": "string", "name": "string", "place_type": "string", "url": "string" }, "possibly_sensitive": true, "quoted_status": { "created_at": "string", "entities": { "user_mentions": [ { "id": 1, "id_str": "string", "indices": [ 1 ], "name": "string", "screen_name": "string" } ] }, "favorite_count": 1, "favorited": true, "filter_level": "string", "id": 1, "id_str": "string", "in_reply_to_screen_name": "string", "in_reply_to_status_id": 1, "in_reply_to_status_id_str": "string", "in_reply_to_user_id": 1, "in_reply_to_user_id_str": "string", "is_quote_status": true, "lang": "string", "retweet_count": 1, "retweeted": true, "source": "string", "text": "string", "truncated": true, "user": { "contributors_enabled": true, "created_at": "string", "default_profile": true, "default_profile_image": true, "description": "string", "favourites_count": 1, "followers_count": 1, "friends_count": 1, "geo_enabled": true, "id": 1, "id_str": "string", "is_translator": true, "lang": "string", "listed_count": 1, "name": "string", "profile_background_color": "string", "profile_background_image_url": "string", "profile_background_image_url_https": "string", "profile_background_tile": true, "profile_banner_url": "string", "profile_image_url": "string", "profile_image_url_https": "string", "profile_link_color": "string", "profile_sidebar_border_color": "string", "profile_sidebar_fill_color": "string", "profile_text_color": "string", "profile_use_background_image": true, "protected": true, "screen_name": "string", "statuses_count": 1, "verified": true } }, "quoted_status_id": 1, "quoted_status_id_str": "string", "retweet_count": 1, "retweeted": true, "source": "string", "text": "string", "timestamp_ms": "string", "truncated": true, "user": { "contributors_enabled": true, "created_at": "string", "default_profile": true, "default_profile_image": true, "description": "string", "favourites_count": 1, "followers_count": 1, "friends_count": 1, "geo_enabled": true, "id": 1, "id_str": "string", "is_translator": true, "lang": "string", "listed_count": 1, "location": "string", "name": "string", "profile_background_color": "string", "profile_background_image_url": "string", "profile_background_image_url_https": "string", "profile_background_tile": true, "profile_banner_url": "string", "profile_image_url": "string", "profile_image_url_https": "string", "profile_link_color": "string", "profile_sidebar_border_color": "string", "profile_sidebar_fill_color": "string", "profile_text_color": "string", "profile_use_background_image": true, "protected": true, "screen_name": "string", "statuses_count": 1, "time_zone": "string", "url": "string", "utc_offset": 1, "verified": true } }
+{ "coordinates": { "coordinates": [ 1.1 ], "type": "string" }, "created_at": "string", "favorite_count": 1, "favorited": true, "filter_level": "string", "geo": { "coordinates": [ 1.1 ], "type": "string" }, "id": "11111111111111111111", "id_str": "string", "in_reply_to_screen_name": "string", "in_reply_to_status_id": 1, "in_reply_to_status_id_str": "string", "in_reply_to_user_id": 1, "in_reply_to_user_id_str": "string", "is_quote_status": true, "lang": "string", "place": { "bounding_box": { "coordinates": [ [ [ 1.1 ] ] ], "type": "string" }, "country": "string", "country_code": "string", "full_name": "string", "id": "string", "name": "string", "place_type": "string", "url": "string" }, "possibly_sensitive": true, "quoted_status": { "created_at": "string", "entities": { "user_mentions": [ { "id": 1, "id_str": "string", "indices": [ 1 ], "name": "string", "screen_name": "string" } ] }, "favorite_count": 1, "favorited": true, "filter_level": "string", "id": 1, "id_str": "string", "in_reply_to_screen_name": "string", "in_reply_to_status_id": 1, "in_reply_to_status_id_str": "string", "in_reply_to_user_id": 1, "in_reply_to_user_id_str": "string", "is_quote_status": true, "lang": "string", "retweet_count": 1, "retweeted": true, "source": "string", "text": "string", "truncated": true, "user": { "contributors_enabled": true, "created_at": "string", "default_profile": true, "default_profile_image": true, "description": "string", "favourites_count": 1, "followers_count": 1, "friends_count": 1, "geo_enabled": true, "id": 1, "id_str": "string", "is_translator": true, "lang": "string", "listed_count": 1, "name": "string", "profile_background_color": "string", "profile_background_image_url": "string", "profile_background_image_url_https": "string", "profile_background_tile": true, "profile_banner_url": "string", "profile_image_url": "string", "profile_image_url_https": "string", "profile_link_color": "string", "profile_sidebar_border_color": "string", "profile_sidebar_fill_color": "string", "profile_text_color": "string", "profile_use_background_image": true, "protected": true, "screen_name": "string", "statuses_count": 1, "verified": true } }, "quoted_status_id": 1, "quoted_status_id_str": "string", "retweet_count": 1, "retweeted": true, "source": "string", "text": "string", "timestamp_ms": "string", "truncated": true, "user": { "contributors_enabled": true, "created_at": "string", "default_profile": true, "default_profile_image": true, "description": "string", "favourites_count": 1, "followers_count": 1, "friends_count": 1, "geo_enabled": true, "id": 1, "id_str": "string", "is_translator": true, "lang": "string", "listed_count": 1, "location": "string", "name": "string", "profile_background_color": "string", "profile_background_image_url": "string", "profile_background_image_url_https": "string", "profile_background_tile": true, "profile_banner_url": "string", "profile_image_url": "string", "profile_image_url_https": "string", "profile_link_color": "string", "profile_sidebar_border_color": "string", "profile_sidebar_fill_color": "string", "profile_text_color": "string", "profile_use_background_image": true, "protected": true, "screen_name": "string", "statuses_count": 1, "time_zone": "string", "url": "string", "utc_offset": 1, "verified": true } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-utf8/parquet-utf8.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-utf8/parquet-utf8.05.adm
new file mode 100644
index 0000000..c60145d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to-hdfs/parquet-utf8/parquet-utf8.05.adm
@@ -0,0 +1,8 @@
+{ "id": 1, "name": "John" }
+{ "id": 2, "name": "Abel" }
+{ "id": 3, "name": "Sandy" }
+{ "id": 4, "name": "Alex" }
+{ "id": 5, "name": "Mike" }
+{ "id": 6, "name": "Tom" }
+{ "comment": "😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا", "id": 7, "name": "Jerry" }
+{ "comment": "😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. 
Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. 
Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا", "id": 8, "name": "William" }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/delimiter/delimiter.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/delimiter/delimiter.05.adm
new file mode 100644
index 0000000..8722e43
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/delimiter/delimiter.05.adm
@@ -0,0 +1,11 @@
+{ "id": 1, "name": "Macbook1", "amount": 123.2, "accountNumber": 345.34 }
+{ "id": 2, "name": "Macbook2", "amount": 456.7, "accountNumber": 123.45 }
+{ "id": 3, "name": "Macbook3", "amount": 789.1, "accountNumber": 678.90 }
+{ "id": 4, "name": "Macbook4", "amount": 234.5, "accountNumber": 567.89 }
+{ "id": 5, "name": "Macbook5", "amount": 876.5, "accountNumber": 345.67 }
+{ "id": 6, "name": "Macbook6", "amount": 345.6, "accountNumber": 987.65 }
+{ "id": 7, "name": "Macbook7", "amount": 678.9, "accountNumber": 234.56 }
+{ "id": 8, "name": "Macbook8", "amount": 987.2, "accountNumber": 789.12 }
+{ "id": 9, "name": "Macbook9", "amount": 543.2, "accountNumber": 321.45 }
+{ "id": 10, "name": "Macbook10", "amount": 123.9, "accountNumber": 654.32 }
+{ "id": 11, "name": "Macbook11", "amount": 567.8, "accountNumber": 456.78 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/delimiter/delimiter.07.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/delimiter/delimiter.07.adm
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/delimiter/delimiter.07.adm
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/header/header.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/header/header.05.adm
new file mode 100644
index 0000000..8722e43
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/header/header.05.adm
@@ -0,0 +1,11 @@
+{ "id": 1, "name": "Macbook1", "amount": 123.2, "accountNumber": 345.34 }
+{ "id": 2, "name": "Macbook2", "amount": 456.7, "accountNumber": 123.45 }
+{ "id": 3, "name": "Macbook3", "amount": 789.1, "accountNumber": 678.90 }
+{ "id": 4, "name": "Macbook4", "amount": 234.5, "accountNumber": 567.89 }
+{ "id": 5, "name": "Macbook5", "amount": 876.5, "accountNumber": 345.67 }
+{ "id": 6, "name": "Macbook6", "amount": 345.6, "accountNumber": 987.65 }
+{ "id": 7, "name": "Macbook7", "amount": 678.9, "accountNumber": 234.56 }
+{ "id": 8, "name": "Macbook8", "amount": 987.2, "accountNumber": 789.12 }
+{ "id": 9, "name": "Macbook9", "amount": 543.2, "accountNumber": 321.45 }
+{ "id": 10, "name": "Macbook10", "amount": 123.9, "accountNumber": 654.32 }
+{ "id": 11, "name": "Macbook11", "amount": 567.8, "accountNumber": 456.78 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/header/header.12.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/header/header.12.adm
new file mode 100644
index 0000000..5055d28
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/header/header.12.adm
@@ -0,0 +1,11 @@
+{ "id": 1, "name": "", "amount": 123.2, "accountNumber": 345.34 }
+{ "id": 2, "name": "", "amount": 456.7, "accountNumber": 123.45 }
+{ "id": 3, "name": "", "amount": 789.1, "accountNumber": 678.90 }
+{ "id": 4, "name": "", "amount": 234.5, "accountNumber": 567.89 }
+{ "id": 5, "name": "", "amount": 876.5, "accountNumber": 345.67 }
+{ "id": 6, "name": "", "amount": 345.6, "accountNumber": 987.65 }
+{ "id": 7, "name": "", "amount": 678.9, "accountNumber": 234.56 }
+{ "id": 8, "name": "", "amount": 987.2, "accountNumber": 789.12 }
+{ "id": 9, "name": "", "amount": 543.2, "accountNumber": 321.45 }
+{ "id": 10, "name": "", "amount": 123.9, "accountNumber": 654.32 }
+{ "id": 11, "name": "", "amount": 567.8, "accountNumber": 456.78 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/header/header.22.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/header/header.22.adm
new file mode 100644
index 0000000..90e44c4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/header/header.22.adm
@@ -0,0 +1,11 @@
+{ "id": 1, "name": "IamNull", "amount": 123.2, "accountNumber": 345.34 }
+{ "id": 2, "name": "IamNull", "amount": 456.7, "accountNumber": 123.45 }
+{ "id": 3, "name": "IamNull", "amount": 789.1, "accountNumber": 678.90 }
+{ "id": 4, "name": "IamNull", "amount": 234.5, "accountNumber": 567.89 }
+{ "id": 5, "name": "IamNull", "amount": 876.5, "accountNumber": 345.67 }
+{ "id": 6, "name": "IamNull", "amount": 345.6, "accountNumber": 987.65 }
+{ "id": 7, "name": "IamNull", "amount": 678.9, "accountNumber": 234.56 }
+{ "id": 8, "name": "IamNull", "amount": 987.2, "accountNumber": 789.12 }
+{ "id": 9, "name": "IamNull", "amount": 543.2, "accountNumber": 321.45 }
+{ "id": 10, "name": "IamNull", "amount": 123.9, "accountNumber": 654.32 }
+{ "id": 11, "name": "IamNull", "amount": 567.8, "accountNumber": 456.78 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/header/header.32.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/header/header.32.adm
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/header/header.32.adm
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/null/null.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/null/null.05.adm
new file mode 100644
index 0000000..90e44c4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/null/null.05.adm
@@ -0,0 +1,11 @@
+{ "id": 1, "name": "IamNull", "amount": 123.2, "accountNumber": 345.34 }
+{ "id": 2, "name": "IamNull", "amount": 456.7, "accountNumber": 123.45 }
+{ "id": 3, "name": "IamNull", "amount": 789.1, "accountNumber": 678.90 }
+{ "id": 4, "name": "IamNull", "amount": 234.5, "accountNumber": 567.89 }
+{ "id": 5, "name": "IamNull", "amount": 876.5, "accountNumber": 345.67 }
+{ "id": 6, "name": "IamNull", "amount": 345.6, "accountNumber": 987.65 }
+{ "id": 7, "name": "IamNull", "amount": 678.9, "accountNumber": 234.56 }
+{ "id": 8, "name": "IamNull", "amount": 987.2, "accountNumber": 789.12 }
+{ "id": 9, "name": "IamNull", "amount": 543.2, "accountNumber": 321.45 }
+{ "id": 10, "name": "IamNull", "amount": 123.9, "accountNumber": 654.32 }
+{ "id": 11, "name": "IamNull", "amount": 567.8, "accountNumber": 456.78 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/quote-escape/quote-escape.04.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/quote-escape/quote-escape.04.adm
new file mode 100644
index 0000000..2f9e206
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/quote-escape/quote-escape.04.adm
@@ -0,0 +1,4 @@
+{ "id": 1, "name": "Macbook1", "amount": 123.2, "accountNumber": 345.34 }
+{ "id": 2, "name": "Macbook2", "amount": 456.7, "accountNumber": 123.45 }
+{ "id": 3, "name": "Macbook3", "amount": 789.1, "accountNumber": 678.90 }
+{ "id": 4, "name": "Mac|,book4", "amount": 234.5, "accountNumber": 567.89 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/quote-escape/quote-escape.13.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/quote-escape/quote-escape.13.adm
new file mode 100644
index 0000000..2f9e206
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/quote-escape/quote-escape.13.adm
@@ -0,0 +1,4 @@
+{ "id": 1, "name": "Macbook1", "amount": 123.2, "accountNumber": 345.34 }
+{ "id": 2, "name": "Macbook2", "amount": 456.7, "accountNumber": 123.45 }
+{ "id": 3, "name": "Macbook3", "amount": 789.1, "accountNumber": 678.90 }
+{ "id": 4, "name": "Mac|,book4", "amount": 234.5, "accountNumber": 567.89 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/simple-csv/simple-csv.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/simple-csv/simple-csv.05.adm
new file mode 100644
index 0000000..90e44c4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/simple-csv/simple-csv.05.adm
@@ -0,0 +1,11 @@
+{ "id": 1, "name": "IamNull", "amount": 123.2, "accountNumber": 345.34 }
+{ "id": 2, "name": "IamNull", "amount": 456.7, "accountNumber": 123.45 }
+{ "id": 3, "name": "IamNull", "amount": 789.1, "accountNumber": 678.90 }
+{ "id": 4, "name": "IamNull", "amount": 234.5, "accountNumber": 567.89 }
+{ "id": 5, "name": "IamNull", "amount": 876.5, "accountNumber": 345.67 }
+{ "id": 6, "name": "IamNull", "amount": 345.6, "accountNumber": 987.65 }
+{ "id": 7, "name": "IamNull", "amount": 678.9, "accountNumber": 234.56 }
+{ "id": 8, "name": "IamNull", "amount": 987.2, "accountNumber": 789.12 }
+{ "id": 9, "name": "IamNull", "amount": 543.2, "accountNumber": 321.45 }
+{ "id": 10, "name": "IamNull", "amount": 123.9, "accountNumber": 654.32 }
+{ "id": 11, "name": "IamNull", "amount": 567.8, "accountNumber": 456.78 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/simple-csv/simple-csv.13.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/simple-csv/simple-csv.13.adm
new file mode 100644
index 0000000..2c305f4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/simple-csv/simple-csv.13.adm
@@ -0,0 +1,11 @@
+{ "id": 1, "name": "IamNull", "amount": 123.2, "accountNumber": 345.34 }
+{ "id": 2, "name": "Macbook2", "amount": 456.7, "accountNumber": 123.45 }
+{ "id": 3, "name": "Macbook3", "amount": 789.1, "accountNumber": 678.90 }
+{ "id": 4, "name": "Macbook4", "amount": 234.5, "accountNumber": 567.89 }
+{ "id": 5, "name": "Macbook5", "amount": 876.5, "accountNumber": 345.67 }
+{ "id": 6, "name": "Macbook6", "amount": 345.6, "accountNumber": 987.65 }
+{ "id": 7, "name": "Macbook7", "amount": 678.9, "accountNumber": 234.56 }
+{ "id": 8, "name": "Macbook8", "amount": 987.2, "accountNumber": 789.12 }
+{ "id": 9, "name": "Macbook9", "amount": 543.2, "accountNumber": 321.45 }
+{ "id": 10, "name": "Macbook10", "amount": 123.9, "accountNumber": 654.32 }
+{ "id": 11, "name": "Macbook11", "amount": 567.8, "accountNumber": 456.78 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/type-mismatch/type-mismatch.04.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/type-mismatch/type-mismatch.04.adm
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/csv/type-mismatch/type-mismatch.04.adm
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-cover-data-types/parquet-cover-data-types.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-cover-data-types/parquet-cover-data-types.05.adm
new file mode 100644
index 0000000..8fc863e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-cover-data-types/parquet-cover-data-types.05.adm
@@ -0,0 +1 @@
+{ "name": "Virat", "id": 18, "dateType": date("1988-11-05"), "timeType": time("03:10:00.493"), "boolType": false, "doubleType": 0.75, "datetimeType": datetime("1900-02-01T00:00:00.000") }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-empty-array/parquet-empty-array.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-empty-array/parquet-empty-array.05.adm
new file mode 100644
index 0000000..97de7e9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-empty-array/parquet-empty-array.05.adm
@@ -0,0 +1,2 @@
+{ "id": 2, "name": { "first": [ "power", "star" ] } }
+{ "id": 5, "name": { "first": [  ] } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-heterogeneous/parquet-heterogeneous.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-heterogeneous/parquet-heterogeneous.05.adm
new file mode 100644
index 0000000..4ecacdb
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-heterogeneous/parquet-heterogeneous.05.adm
@@ -0,0 +1,11 @@
+{ "id": 2 }
+{ "name": "virat", "id": 5 }
+{ "name": { "first": "virat" }, "id": 8 }
+{ "name": { "first": "virat" }, "id": 10, "age": 18 }
+{ "name": 123, "id": 12 }
+{ "name": [ 123, 456 ], "id": 15 }
+{ "name": 765, "id": 17 }
+{ "name": [ 789 ], "id": 20 }
+{ "name": [ { "first": "virat" } ], "id": 21 }
+{ "name": [ { "first": "virat", "second": "kohli" } ], "id": 27 }
+{ "name": { "first": "virat" }, "id": 28 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.05.adm
new file mode 100644
index 0000000..6b73640
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-partition-heterogeneous/parquet-partition-heterogeneous.05.adm
@@ -0,0 +1,100 @@
+{ "partitioner_key": "A", "price": 100, "name": { "nickname": "VK" }, "active": [ true, false ], "id": 1, "randomField": 5678 }
+{ "partitioner_key": "A", "price": { "amount": 99.99, "currency": "USD" }, "name": [ "Kohli", "Dhoni" ], "active": 1, "id": 2, "randomField": 18 }
+{ "partitioner_key": "C", "price": "one hundred", "name": false, "active": { "status": "active" }, "id": 3, "randomField": [ 1, 2, 3 ] }
+{ "partitioner_key": "C", "price": 100, "name": { "nickname": "VK" }, "active": [ true, false ], "id": 4, "randomField": 5678 }
+{ "partitioner_key": "A", "price": "99.99", "name": { "last": "Kohli", "first": "Virat" }, "active": "yes", "id": 5, "randomField": "1234" }
+{ "partitioner_key": "C", "price": "101.50", "name": 3456, "active": true, "id": 6, "randomField": { "value": 7890 } }
+{ "partitioner_key": "A", "price": { "amount": 99.99, "currency": "USD" }, "name": [ "Kohli", "Dhoni" ], "active": 1, "id": 7, "randomField": 18 }
+{ "partitioner_key": "B", "price": "one hundred", "name": false, "active": { "status": "active" }, "id": 8, "randomField": [ 1, 2, 3 ] }
+{ "partitioner_key": "A", "price": { "amount": 99.99, "currency": "USD" }, "name": [ "Kohli", "Dhoni" ], "active": 1, "id": 9, "randomField": 18 }
+{ "partitioner_key": "B", "price": "101.50", "name": 3456, "active": true, "id": 10, "randomField": { "value": 7890 } }
+{ "partitioner_key": "B", "price": "101.50", "name": 3456, "active": true, "id": 11, "randomField": { "value": 7890 } }
+{ "partitioner_key": "B", "price": { "currency": "INR", "value": 99.99 }, "name": "None", "active": false, "id": 12, "randomField": "5678" }
+{ "partitioner_key": "A", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 13, "randomField": "None" }
+{ "partitioner_key": "A", "price": 100, "name": { "nickname": "VK" }, "active": [ true, false ], "id": 14, "randomField": 5678 }
+{ "partitioner_key": "C", "price": { "amount": 99.99, "currency": "USD" }, "name": [ "Kohli", "Dhoni" ], "active": 1, "id": 15, "randomField": 18 }
+{ "partitioner_key": "C", "price": [ 99, 100, 101 ], "name": 9876, "active": "active", "id": 16, "randomField": { "identifier": "ORD1234", "type": "order" } }
+{ "partitioner_key": "B", "price": [ 99, 100, 101 ], "name": 9876, "active": "active", "id": 17, "randomField": { "identifier": "ORD1234", "type": "order" } }
+{ "partitioner_key": "C", "price": 100, "name": { "nickname": "VK" }, "active": [ true, false ], "id": 18, "randomField": 5678 }
+{ "partitioner_key": "C", "price": [ 99, 100, 101 ], "name": 9876, "active": "active", "id": 19, "randomField": { "identifier": "ORD1234", "type": "order" } }
+{ "partitioner_key": "B", "price": 101.5, "name": { "last": "Dhoni", "first": "MS" }, "active": "no", "id": 20, "randomField": [ 9, 8, 7 ] }
+{ "partitioner_key": "B", "price": "99.99", "name": { "last": "Kohli", "first": "Virat" }, "active": "yes", "id": 21, "randomField": "1234" }
+{ "partitioner_key": "A", "price": "one hundred", "name": false, "active": { "status": "active" }, "id": 22, "randomField": [ 1, 2, 3 ] }
+{ "partitioner_key": "C", "price": { "amount": 99.99, "currency": "USD" }, "name": [ "Kohli", "Dhoni" ], "active": 1, "id": 23, "randomField": 18 }
+{ "partitioner_key": "B", "price": "one hundred", "name": false, "active": { "status": "active" }, "id": 24, "randomField": [ 1, 2, 3 ] }
+{ "partitioner_key": "C", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 25, "randomField": "None" }
+{ "partitioner_key": "A", "price": "one hundred", "name": false, "active": { "status": "active" }, "id": 26, "randomField": [ 1, 2, 3 ] }
+{ "partitioner_key": "A", "price": { "currency": "INR", "value": 99.99 }, "name": "None", "active": false, "id": 27, "randomField": "5678" }
+{ "partitioner_key": "B", "price": 101.5, "name": { "last": "Dhoni", "first": "MS" }, "active": "no", "id": 28, "randomField": [ 9, 8, 7 ] }
+{ "partitioner_key": "C", "price": "99.99", "name": { "last": "Kohli", "first": "Virat" }, "active": "yes", "id": 29, "randomField": "1234" }
+{ "partitioner_key": "C", "price": [ 99, 100, 101 ], "name": 9876, "active": "active", "id": 30, "randomField": { "identifier": "ORD1234", "type": "order" } }
+{ "partitioner_key": "C", "price": { "currency": "INR", "value": 99.99 }, "name": "None", "active": false, "id": 31, "randomField": "5678" }
+{ "partitioner_key": "B", "price": 100, "name": { "nickname": "VK" }, "active": [ true, false ], "id": 32, "randomField": 5678 }
+{ "partitioner_key": "C", "price": "101.50", "name": 3456, "active": true, "id": 33, "randomField": { "value": 7890 } }
+{ "partitioner_key": "B", "price": "101.50", "name": 3456, "active": true, "id": 34, "randomField": { "value": 7890 } }
+{ "partitioner_key": "B", "price": [ 99, 100, 101 ], "name": 9876, "active": "active", "id": 35, "randomField": { "identifier": "ORD1234", "type": "order" } }
+{ "partitioner_key": "B", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 36, "randomField": "None" }
+{ "partitioner_key": "B", "price": "101.50", "name": 3456, "active": true, "id": 37, "randomField": { "value": 7890 } }
+{ "partitioner_key": "A", "price": [ 99, 100, 101 ], "name": 9876, "active": "active", "id": 38, "randomField": { "identifier": "ORD1234", "type": "order" } }
+{ "partitioner_key": "B", "price": 100, "name": { "nickname": "VK" }, "active": [ true, false ], "id": 39, "randomField": 5678 }
+{ "partitioner_key": "C", "price": "99.99", "name": { "last": "Kohli", "first": "Virat" }, "active": "yes", "id": 40, "randomField": "1234" }
+{ "partitioner_key": "A", "price": "one hundred", "name": false, "active": { "status": "active" }, "id": 41, "randomField": [ 1, 2, 3 ] }
+{ "partitioner_key": "A", "price": { "currency": "INR", "value": 99.99 }, "name": "None", "active": false, "id": 42, "randomField": "5678" }
+{ "partitioner_key": "C", "price": 101.5, "name": { "last": "Dhoni", "first": "MS" }, "active": "no", "id": 43, "randomField": [ 9, 8, 7 ] }
+{ "partitioner_key": "C", "price": { "amount": 99.99, "currency": "USD" }, "name": [ "Kohli", "Dhoni" ], "active": 1, "id": 44, "randomField": 18 }
+{ "partitioner_key": "A", "price": 99.99, "name": "Virat Kohli", "active": true, "id": 45, "randomField": 1234 }
+{ "partitioner_key": "B", "price": "one hundred", "name": false, "active": { "status": "active" }, "id": 46, "randomField": [ 1, 2, 3 ] }
+{ "partitioner_key": "C", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 47, "randomField": "None" }
+{ "partitioner_key": "A", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 48, "randomField": "None" }
+{ "partitioner_key": "A", "price": 101.5, "name": { "last": "Dhoni", "first": "MS" }, "active": "no", "id": 49, "randomField": [ 9, 8, 7 ] }
+{ "partitioner_key": "C", "price": 101.5, "name": { "last": "Dhoni", "first": "MS" }, "active": "no", "id": 50, "randomField": [ 9, 8, 7 ] }
+{ "partitioner_key": "B", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 51, "randomField": "None" }
+{ "partitioner_key": "A", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 52, "randomField": "None" }
+{ "partitioner_key": "C", "price": "101.50", "name": 3456, "active": true, "id": 53, "randomField": { "value": 7890 } }
+{ "partitioner_key": "B", "price": 101.5, "name": { "last": "Dhoni", "first": "MS" }, "active": "no", "id": 54, "randomField": [ 9, 8, 7 ] }
+{ "partitioner_key": "A", "price": { "currency": "INR", "value": 99.99 }, "name": "None", "active": false, "id": 55, "randomField": "5678" }
+{ "partitioner_key": "B", "price": 99.99, "name": "Virat Kohli", "active": true, "id": 56, "randomField": 1234 }
+{ "partitioner_key": "B", "price": "one hundred", "name": false, "active": { "status": "active" }, "id": 57, "randomField": [ 1, 2, 3 ] }
+{ "partitioner_key": "B", "price": "101.50", "name": 3456, "active": true, "id": 58, "randomField": { "value": 7890 } }
+{ "partitioner_key": "A", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 59, "randomField": "None" }
+{ "partitioner_key": "C", "price": { "amount": 99.99, "currency": "USD" }, "name": [ "Kohli", "Dhoni" ], "active": 1, "id": 60, "randomField": 18 }
+{ "partitioner_key": "C", "price": [ 99, 100, 101 ], "name": 9876, "active": "active", "id": 61, "randomField": { "identifier": "ORD1234", "type": "order" } }
+{ "partitioner_key": "A", "price": 101.5, "name": { "last": "Dhoni", "first": "MS" }, "active": "no", "id": 62, "randomField": [ 9, 8, 7 ] }
+{ "partitioner_key": "A", "price": "one hundred", "name": false, "active": { "status": "active" }, "id": 63, "randomField": [ 1, 2, 3 ] }
+{ "partitioner_key": "C", "price": { "amount": 99.99, "currency": "USD" }, "name": [ "Kohli", "Dhoni" ], "active": 1, "id": 64, "randomField": 18 }
+{ "partitioner_key": "A", "price": 100, "name": { "nickname": "VK" }, "active": [ true, false ], "id": 65, "randomField": 5678 }
+{ "partitioner_key": "B", "price": 101.5, "name": { "last": "Dhoni", "first": "MS" }, "active": "no", "id": 66, "randomField": [ 9, 8, 7 ] }
+{ "partitioner_key": "B", "price": { "currency": "INR", "value": 99.99 }, "name": "None", "active": false, "id": 67, "randomField": "5678" }
+{ "partitioner_key": "C", "price": "101.50", "name": 3456, "active": true, "id": 68, "randomField": { "value": 7890 } }
+{ "partitioner_key": "C", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 69, "randomField": "None" }
+{ "partitioner_key": "A", "price": { "amount": 99.99, "currency": "USD" }, "name": [ "Kohli", "Dhoni" ], "active": 1, "id": 70, "randomField": 18 }
+{ "partitioner_key": "A", "price": { "amount": 99.99, "currency": "USD" }, "name": [ "Kohli", "Dhoni" ], "active": 1, "id": 71, "randomField": 18 }
+{ "partitioner_key": "B", "price": { "amount": 99.99, "currency": "USD" }, "name": [ "Kohli", "Dhoni" ], "active": 1, "id": 72, "randomField": 18 }
+{ "partitioner_key": "C", "price": "one hundred", "name": false, "active": { "status": "active" }, "id": 73, "randomField": [ 1, 2, 3 ] }
+{ "partitioner_key": "C", "price": 99.99, "name": "Virat Kohli", "active": true, "id": 74, "randomField": 1234 }
+{ "partitioner_key": "A", "price": "101.50", "name": 3456, "active": true, "id": 75, "randomField": { "value": 7890 } }
+{ "partitioner_key": "B", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 76, "randomField": "None" }
+{ "partitioner_key": "A", "price": "one hundred", "name": false, "active": { "status": "active" }, "id": 77, "randomField": [ 1, 2, 3 ] }
+{ "partitioner_key": "A", "price": 101.5, "name": { "last": "Dhoni", "first": "MS" }, "active": "no", "id": 78, "randomField": [ 9, 8, 7 ] }
+{ "partitioner_key": "C", "price": "99.99", "name": { "last": "Kohli", "first": "Virat" }, "active": "yes", "id": 79, "randomField": "1234" }
+{ "partitioner_key": "A", "price": "99.99", "name": { "last": "Kohli", "first": "Virat" }, "active": "yes", "id": 80, "randomField": "1234" }
+{ "partitioner_key": "C", "price": [ 99, 100, 101 ], "name": 9876, "active": "active", "id": 81, "randomField": { "identifier": "ORD1234", "type": "order" } }
+{ "partitioner_key": "A", "price": 101.5, "name": { "last": "Dhoni", "first": "MS" }, "active": "no", "id": 82, "randomField": [ 9, 8, 7 ] }
+{ "partitioner_key": "A", "price": { "currency": "INR", "value": 99.99 }, "name": "None", "active": false, "id": 83, "randomField": "5678" }
+{ "partitioner_key": "A", "price": 101.5, "name": { "last": "Dhoni", "first": "MS" }, "active": "no", "id": 84, "randomField": [ 9, 8, 7 ] }
+{ "partitioner_key": "C", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 85, "randomField": "None" }
+{ "partitioner_key": "C", "price": "99.99", "name": { "last": "Kohli", "first": "Virat" }, "active": "yes", "id": 86, "randomField": "1234" }
+{ "partitioner_key": "C", "price": { "amount": 99.99, "currency": "USD" }, "name": [ "Kohli", "Dhoni" ], "active": 1, "id": 87, "randomField": 18 }
+{ "partitioner_key": "A", "price": 100, "name": { "nickname": "VK" }, "active": [ true, false ], "id": 88, "randomField": 5678 }
+{ "partitioner_key": "C", "price": "99.99", "name": { "last": "Kohli", "first": "Virat" }, "active": "yes", "id": 89, "randomField": "1234" }
+{ "partitioner_key": "C", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 90, "randomField": "None" }
+{ "partitioner_key": "A", "price": 99.99, "name": "Virat Kohli", "active": true, "id": 91, "randomField": 1234 }
+{ "partitioner_key": "A", "price": [ 99, 100, 101 ], "name": 9876, "active": "active", "id": 92, "randomField": { "identifier": "ORD1234", "type": "order" } }
+{ "partitioner_key": "B", "price": "101.50", "name": 3456, "active": true, "id": 93, "randomField": { "value": 7890 } }
+{ "partitioner_key": "A", "price": 101.5, "name": { "last": "Dhoni", "first": "MS" }, "active": "no", "id": 94, "randomField": [ 9, 8, 7 ] }
+{ "partitioner_key": "B", "price": 50, "name": [ "Dhoni", "Kohli" ], "active": 0, "id": 95, "randomField": "None" }
+{ "partitioner_key": "B", "price": 99.99, "name": "Virat Kohli", "active": true, "id": 96, "randomField": 1234 }
+{ "partitioner_key": "A", "price": [ 99, 100, 101 ], "name": 9876, "active": "active", "id": 97, "randomField": { "identifier": "ORD1234", "type": "order" } }
+{ "partitioner_key": "B", "price": "99.99", "name": { "last": "Kohli", "first": "Virat" }, "active": "yes", "id": 98, "randomField": "1234" }
+{ "partitioner_key": "A", "price": [ 99, 100, 101 ], "name": 9876, "active": "active", "id": 99, "randomField": { "identifier": "ORD1234", "type": "order" } }
+{ "partitioner_key": "C", "price": "99.99", "name": { "last": "Kohli", "first": "Virat" }, "active": "yes", "id": 100, "randomField": "1234" }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-simple/parquet-simple.04.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-simple/parquet-simple.04.adm
new file mode 100644
index 0000000..bf567b2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-simple/parquet-simple.04.adm
@@ -0,0 +1 @@
+{ "id": "123" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-tweet/parquet-tweet.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-tweet/parquet-tweet.05.adm
new file mode 100644
index 0000000..5e0df96
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-tweet/parquet-tweet.05.adm
@@ -0,0 +1,2 @@
+{ "coordinates": { "coordinates": [ 1.1 ], "type": "string" }, "created_at": "string", "entities": { "urls": [ { "display_url": "string", "expanded_url": "string", "indices": [ 1 ], "url": "string" } ], "user_mentions": [ { "id": 1, "id_str": "string", "indices": [ 1 ], "name": "string", "screen_name": "string" } ] }, "favorite_count": 1, "favorited": true, "filter_level": "string", "geo": { "coordinates": [ 1.1 ], "type": "string" }, "id": "0000000", "id_str": "string", "in_reply_to_screen_name": "string", "in_reply_to_status_id": 1, "in_reply_to_status_id_str": "string", "in_reply_to_user_id": 1, "in_reply_to_user_id_str": "string", "is_quote_status": true, "lang": "string", "place": { "bounding_box": { "coordinates": [ [ [ 1.1 ] ] ], "type": "string" }, "country": "string", "country_code": "string", "full_name": "string", "id": "string", "name": "string", "place_type": "string", "url": "string" }, "possibly_sensitive": true, "quoted_status": { "created_at": "string", "entities": { "user_mentions": [ { "id": 1, "id_str": "string", "indices": [ 1 ], "name": "string", "screen_name": "string" } ] }, "favorite_count": 1, "favorited": true, "filter_level": "string", "id": 1, "id_str": "string", "in_reply_to_screen_name": "string", "in_reply_to_status_id": 1, "in_reply_to_status_id_str": "string", "in_reply_to_user_id": 1, "in_reply_to_user_id_str": "string", "is_quote_status": true, "lang": "string", "retweet_count": 1, "retweeted": true, "source": "string", "text": "string", "truncated": true, "user": { "contributors_enabled": true, "created_at": "string", "default_profile": true, "default_profile_image": true, "description": "string", "favourites_count": 1, "followers_count": 1, "friends_count": 1, "geo_enabled": true, "id": 1, "id_str": "string", "is_translator": true, "lang": "string", "listed_count": 1, "name": "string", "profile_background_color": "string", "profile_background_image_url": "string", "profile_background_image_url_https": "string", "profile_background_tile": true, "profile_banner_url": "string", "profile_image_url": "string", "profile_image_url_https": "string", "profile_link_color": "string", "profile_sidebar_border_color": "string", "profile_sidebar_fill_color": "string", "profile_text_color": "string", "profile_use_background_image": true, "protected": true, "screen_name": "string", "statuses_count": 1, "verified": true } }, "quoted_status_id": 1, "quoted_status_id_str": "string", "retweet_count": 1, "retweeted": true, "source": "string", "text": "string", "timestamp_ms": "string", "truncated": true, "user": { "contributors_enabled": true, "created_at": "string", "default_profile": true, "default_profile_image": true, "description": "string", "favourites_count": 1, "followers_count": 1, "friends_count": 1, "geo_enabled": true, "id": 1, "id_str": "string", "is_translator": true, "lang": "string", "listed_count": 1, "location": "string", "name": "string", "profile_background_color": "string", "profile_background_image_url": "string", "profile_background_image_url_https": "string", "profile_background_tile": true, "profile_banner_url": "string", "profile_image_url": "string", "profile_image_url_https": "string", "profile_link_color": "string", "profile_sidebar_border_color": "string", "profile_sidebar_fill_color": "string", "profile_text_color": "string", "profile_use_background_image": true, "protected": true, "screen_name": "string", "statuses_count": 1, "time_zone": "string", "url": "string", "utc_offset": 1, "verified": true } }
+{ "coordinates": { "coordinates": [ 1.1 ], "type": "string" }, "created_at": "string", "favorite_count": 1, "favorited": true, "filter_level": "string", "geo": { "coordinates": [ 1.1 ], "type": "string" }, "id": "11111111111111111111", "id_str": "string", "in_reply_to_screen_name": "string", "in_reply_to_status_id": 1, "in_reply_to_status_id_str": "string", "in_reply_to_user_id": 1, "in_reply_to_user_id_str": "string", "is_quote_status": true, "lang": "string", "place": { "bounding_box": { "coordinates": [ [ [ 1.1 ] ] ], "type": "string" }, "country": "string", "country_code": "string", "full_name": "string", "id": "string", "name": "string", "place_type": "string", "url": "string" }, "possibly_sensitive": true, "quoted_status": { "created_at": "string", "entities": { "user_mentions": [ { "id": 1, "id_str": "string", "indices": [ 1 ], "name": "string", "screen_name": "string" } ] }, "favorite_count": 1, "favorited": true, "filter_level": "string", "id": 1, "id_str": "string", "in_reply_to_screen_name": "string", "in_reply_to_status_id": 1, "in_reply_to_status_id_str": "string", "in_reply_to_user_id": 1, "in_reply_to_user_id_str": "string", "is_quote_status": true, "lang": "string", "retweet_count": 1, "retweeted": true, "source": "string", "text": "string", "truncated": true, "user": { "contributors_enabled": true, "created_at": "string", "default_profile": true, "default_profile_image": true, "description": "string", "favourites_count": 1, "followers_count": 1, "friends_count": 1, "geo_enabled": true, "id": 1, "id_str": "string", "is_translator": true, "lang": "string", "listed_count": 1, "name": "string", "profile_background_color": "string", "profile_background_image_url": "string", "profile_background_image_url_https": "string", "profile_background_tile": true, "profile_banner_url": "string", "profile_image_url": "string", "profile_image_url_https": "string", "profile_link_color": "string", "profile_sidebar_border_color": "string", "profile_sidebar_fill_color": "string", "profile_text_color": "string", "profile_use_background_image": true, "protected": true, "screen_name": "string", "statuses_count": 1, "verified": true } }, "quoted_status_id": 1, "quoted_status_id_str": "string", "retweet_count": 1, "retweeted": true, "source": "string", "text": "string", "timestamp_ms": "string", "truncated": true, "user": { "contributors_enabled": true, "created_at": "string", "default_profile": true, "default_profile_image": true, "description": "string", "favourites_count": 1, "followers_count": 1, "friends_count": 1, "geo_enabled": true, "id": 1, "id_str": "string", "is_translator": true, "lang": "string", "listed_count": 1, "location": "string", "name": "string", "profile_background_color": "string", "profile_background_image_url": "string", "profile_background_image_url_https": "string", "profile_background_tile": true, "profile_banner_url": "string", "profile_image_url": "string", "profile_image_url_https": "string", "profile_link_color": "string", "profile_sidebar_border_color": "string", "profile_sidebar_fill_color": "string", "profile_text_color": "string", "profile_use_background_image": true, "protected": true, "screen_name": "string", "statuses_count": 1, "time_zone": "string", "url": "string", "utc_offset": 1, "verified": true } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-utf8/parquet-utf8.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-utf8/parquet-utf8.05.adm
new file mode 100644
index 0000000..c60145d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-utf8/parquet-utf8.05.adm
@@ -0,0 +1,8 @@
+{ "id": 1, "name": "John" }
+{ "id": 2, "name": "Abel" }
+{ "id": 3, "name": "Sandy" }
+{ "id": 4, "name": "Alex" }
+{ "id": 5, "name": "Mike" }
+{ "id": 6, "name": "Tom" }
+{ "comment": "😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا", "id": 7, "name": "Jerry" }
+{ "comment": "😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. 
Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. 
Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا", "id": 8, "name": "William" }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.03.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.03.adm
index 2a16ab1..06c68c8 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.03.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.03.adm
@@ -1,14 +1,13 @@
-{ "Geometries": { "id": 123, "myGeometry": {"type":"Point","coordinates":[-118.4,33.93],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 124, "myGeometry": {"type":"Polygon","coordinates":[[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 126, "myGeometry": {"type":"LineString","coordinates":[[-69.1991349,-12.6006222],[-69.199136,-12.599842],[-69.1982979,-12.5998268],[-69.1982598,-12.599869],[-69.1982188,-12.5998698],[-69.19817,-12.5998707],[-69.198125,-12.5998218],[-69.1973024,-12.5998133],[-69.1972972,-12.6003109],[-69.197394,-12.6003514],[-69.1973906,-12.6009231],[-69.1975115,-12.601026],[-69.1975081,-12.6010968]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 127, "myGeometry": {"type":"MultiPoint","coordinates":[[10,40],[40,30],[20,20],[30,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 128, "myGeometry": {"type":"MultiLineString","coordinates":[[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]],"crs":null} } }
-{ "Geometries": { "id": 129, "myGeometry": {"type":"MultiPolygon","coordinates":[[[[40,40],[20,45],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]],"crs":null} } }
-{ "Geometries": { "id": 130, "myGeometry": {"type":"Point","coordinates":[-71.1043443253471,42.3150676015829],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 131, "myGeometry": {"type":"Point","coordinates":[1,2,3],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 132, "myGeometry": {"type":"Point","coordinates":[1,2,3,4],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 133, "myGeometry": {"type":"Polygon","coordinates":[[[743238,2967416],[743265.625,2967416],[743265,2967450],[743238,2967450],[743238,2967416]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 134, "myGeometry": {"type":"LineString","coordinates":[[-113.98,39.198],[-113.981,39.195]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 135, "myGeometry": {"type":"LineString","coordinates":[[1,2],[4,5],[7,8]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 136, "myGeometry": {"type":"Polygon","coordinates":[[[10,10],[11,10],[11,11],[10,11],[10,10]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 137, "myGeometry": {"type":"Polygon","coordinates":[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
+{ "Geometries": { "id": 123, "myGeometry": {"type":"Point","coordinates":[-118.4,33.93]} } }
+{ "Geometries": { "id": 124, "myGeometry": {"type":"Polygon","coordinates":[[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]]} } }
+{ "Geometries": { "id": 126, "myGeometry": {"type":"LineString","coordinates":[[-69.1991349,-12.6006222],[-69.199136,-12.599842],[-69.1982979,-12.5998268],[-69.1982598,-12.599869],[-69.1982188,-12.5998698],[-69.19817,-12.5998707],[-69.198125,-12.5998218],[-69.1973024,-12.5998133],[-69.1972972,-12.6003109],[-69.197394,-12.6003514],[-69.1973906,-12.6009231],[-69.1975115,-12.601026],[-69.1975081,-12.6010968]]} } }
+{ "Geometries": { "id": 127, "myGeometry": {"type":"MultiPoint","coordinates":[[10,40],[40,30],[20,20],[30,10]]} } }
+{ "Geometries": { "id": 128, "myGeometry": {"type":"MultiLineString","coordinates":[[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]} } }
+{ "Geometries": { "id": 129, "myGeometry": {"type":"MultiPolygon","coordinates":[[[[40,40],[20,45],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]]} } }
+{ "Geometries": { "id": 130, "myGeometry": {"type":"Point","coordinates":[-71.1043443253471,42.3150676015829]} } }
+{ "Geometries": { "id": 131, "myGeometry": {"type":"Point","coordinates":[1,2,3]} } }
+{ "Geometries": { "id": 133, "myGeometry": {"type":"Polygon","coordinates":[[[743238,2967416],[743238,2967450],[743265,2967450],[743265.625,2967416],[743238,2967416]]]} } }
+{ "Geometries": { "id": 134, "myGeometry": {"type":"LineString","coordinates":[[-113.98,39.198],[-113.981,39.195]]} } }
+{ "Geometries": { "id": 135, "myGeometry": {"type":"LineString","coordinates":[[1,2],[4,5],[7,8]]} } }
+{ "Geometries": { "id": 136, "myGeometry": {"type":"Polygon","coordinates":[[[10,10],[10,11],[11,11],[11,10],[10,10]]]} } }
+{ "Geometries": { "id": 137, "myGeometry": {"type":"Polygon","coordinates":[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]} } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.04.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.04.adm
index 6cba06a..688fcf1 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.04.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.04.adm
@@ -1,14 +1,13 @@
-{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 0, "NPoints": 1, "XMax": -118.4, "XMin": -118.4, "YMax": 33.93, "YMin": 33.93, "Binary": hex("01010000009A99999999995DC0D7A3703D0AF74040"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[-118.4,33.93],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POINT (-118.4 33.93)" }
-{ "Type": "Polygon", "Area": 1.3755215000294761E-8, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 6, "XMax": 8.760178, "XMin": 8.7599721, "YMax": 49.7103478, "YMin": 49.7102133, "Binary": hex("01030000000100000007000000B1BE26101B852140ED20C033EBDA4840C11DA8531E8521407694394CEADA4840BAF8DB9E20852140D5F89683EADA484098EF2AB5288521409557F844E8DA48400247020D3685214041F74086E9DA484014F131B32A8521408DE43CADECDA4840B1BE26101B852140ED20C033EBDA4840"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POLYGON ((8.7599721 49.7103028, 8.759997 49.7102752, 8.7600145 49.7102818, 8.7600762 49.7102133, 8.760178 49.7102516, 8.7600914 49.7103478, 8.7599721 49.7103028))" }
-{ "Type": "LineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 13, "XMax": -69.1972972, "XMin": -69.199136, "YMax": -12.5998133, "YMin": -12.6010968, "Binary": hex("01020000000D00000081BF4EA0BE4C51C0CE80C4C0843329C033DDEBA4BE4C51C0D388997D1E3329C064D7ACE9B04C51C05787927F1C3329C02347DF49B04C51C0B9C49107223329C0F4DCE79DAF4C51C054B76922223329C0184339D1AE4C51C042A89C40223329C048E17A14AE4C51C010DBCCD71B3329C007DB3E9AA04C51C0636996BA1A3329C0E9656F84A04C51C05ADD45F35B3329C04E9B711AA24C51C033373A42613329C06CE22E0CA24C51C066D24B31AC3329C05F454607A44C51C065170CAEB93329C07C8C03F9A34C51C0E475B3F5C23329C0"), "GeoJSON": "{\"type\":\"LineString\",\"coordinates\":[[-69.1991349,-12.6006222],[-69.199136,-12.599842],[-69.1982979,-12.5998268],[-69.1982598,-12.599869],[-69.1982188,-12.5998698],[-69.19817,-12.5998707],[-69.198125,-12.5998218],[-69.1973024,-12.5998133],[-69.1972972,-12.6003109],[-69.197394,-12.6003514],[-69.1973906,-12.6009231],[-69.1975115,-12.601026],[-69.1975081,-12.6010968]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "LINESTRING (-69.1991349 -12.6006222, -69.199136 -12.599842, -69.1982979 -12.5998268, -69.1982598 -12.599869, -69.1982188 -12.5998698, -69.19817 -12.5998707, -69.198125 -12.5998218, -69.1973024 -12.5998133, -69.1972972 -12.6003109, -69.197394 -12.6003514, -69.1973906 -12.6009231, -69.1975115 -12.601026, -69.1975081 -12.6010968)" }
-{ "Type": "MultiPoint", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 0, "NPoints": 4, "XMax": 40.0, "XMin": 10.0, "YMax": 40.0, "YMin": 10.0, "Binary": hex("010400000004000000010100000000000000000024400000000000004440010100000000000000000044400000000000003E4001010000000000000000003440000000000000344001010000000000000000003E400000000000002440"), "GeoJSON": "{\"type\":\"MultiPoint\",\"coordinates\":[[10,40],[40,30],[20,20],[30,10]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))" }
-{ "Type": "MultiLineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 7, "XMax": 40.0, "XMin": 10.0, "YMax": 40.0, "YMin": 10.0, "Binary": hex("010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003E400000000000003E40000000000000444000000000000034400000000000003E400000000000002440"), "GeoJSON": "{\"type\":\"MultiLineString\",\"coordinates\":[[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]],\"crs\":null}", "WKT": "MULTILINESTRING ((10 10, 20 20, 10 40), (40 40, 30 30, 40 20, 30 10))" }
-{ "Type": "MultiPolygon", "Area": 712.5, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 11, "XMax": 45.0, "XMin": 10.0, "YMax": 45.0, "YMin": 5.0, "Binary": hex("01060000000200000001030000000100000004000000000000000000444000000000000044400000000000003440000000000080464000000000008046400000000000003E4000000000000044400000000000004440010300000002000000060000000000000000003440000000000080414000000000000024400000000000003E40000000000000244000000000000024400000000000003E4000000000000014400000000000804640000000000000344000000000000034400000000000804140040000000000000000003E40000000000000344000000000000034400000000000002E40000000000000344000000000000039400000000000003E400000000000003440"), "GeoJSON": "{\"type\":\"MultiPolygon\",\"coordinates\":[[[[40,40],[20,45],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]],\"crs\":null}", "WKT": "MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)), ((20 35, 10 30, 10 10, 30 5, 45 20, 20 35), (30 20, 20 15, 20 25, 30 20)))" }
-{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 0, "NPoints": 1, "XMax": -71.1043443253471, "XMin": -71.1043443253471, "YMax": 42.3150676015829, "YMin": 42.3150676015829, "Binary": hex("0101000000E538D293ADC651C0F3699A2254284540"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[-71.1043443253471,42.3150676015829],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POINT (-71.1043443253471 42.3150676015829)" }
-{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 3, "Dimension": 0, "NPoints": 1, "XMax": 1.0, "XMin": 1.0, "YMax": 2.0, "YMin": 2.0, "Binary": hex("01E9030000000000000000F03F00000000000000400000000000000840"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[1,2,3],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POINT Z (1 2 3)" }
-{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 4, "Dimension": 0, "NPoints": 1, "XMax": 1.0, "XMin": 1.0, "YMax": 2.0, "YMin": 2.0, "Binary": hex("01B90B0000000000000000F03F000000000000004000000000000008400000000000001040"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[1,2,3,4],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POINT ZM (1 2 3 4)" }
-{ "Type": "Polygon", "Area": 928.625, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 4, "XMax": 743265.625, "XMin": 743238.0, "YMax": 2967450.0, "YMin": 2967416.0, "Binary": hex("01030000000100000005000000000000008CAE264100000000BCA3464100000040C3AE264100000000BCA3464100000000C2AE264100000000CDA34641000000008CAE264100000000CDA34641000000008CAE264100000000BCA34641"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[743238,2967416],[743265.625,2967416],[743265,2967450],[743238,2967450],[743238,2967416]]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POLYGON ((743238 2967416, 743265.625 2967416, 743265 2967450, 743238 2967450, 743238 2967416))" }
-{ "Type": "LineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 2, "XMax": -113.98, "XMin": -113.981, "YMax": 39.198, "YMin": 39.195, "Binary": hex("0102000000020000001F85EB51B87E5CC0D34D621058994340105839B4C87E5CC0295C8FC2F5984340"), "GeoJSON": "{\"type\":\"LineString\",\"coordinates\":[[-113.98,39.198],[-113.981,39.195]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "LINESTRING (-113.98 39.198, -113.981 39.195)" }
-{ "Type": "LineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 3, "XMax": 7.0, "XMin": 1.0, "YMax": 8.0, "YMin": 2.0, "Binary": hex("010200000003000000000000000000F03F0000000000000040000000000000104000000000000014400000000000001C400000000000002040"), "GeoJSON": "{\"type\":\"LineString\",\"coordinates\":[[1,2],[4,5],[7,8]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "LINESTRING (1 2, 4 5, 7 8)" }
-{ "Type": "Polygon", "Area": 1.0, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 4, "XMax": 11.0, "XMin": 10.0, "YMax": 11.0, "YMin": 10.0, "Binary": hex("010300000001000000050000000000000000002440000000000000244000000000000026400000000000002440000000000000264000000000000026400000000000002440000000000000264000000000000024400000000000002440"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[10,10],[11,10],[11,11],[10,11],[10,10]]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POLYGON ((10 10, 11 10, 11 11, 10 11, 10 10))" }
-{ "Type": "Polygon", "Area": 675.0, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 7, "XMax": 45.0, "XMin": 10.0, "YMax": 45.0, "YMin": 10.0, "Binary": hex("0103000000020000000500000000000000008041400000000000002440000000000080464000000000008046400000000000002E40000000000000444000000000000024400000000000003440000000000080414000000000000024400400000000000000000034400000000000003E40000000000080414000000000008041400000000000003E40000000000000344000000000000034400000000000003E40"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))" }
+{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 0, "NPoints": 1, "XMax": -118.4, "XMin": -118.4, "YMax": 33.93, "YMin": 33.93, "Binary": hex("01010000009A99999999995DC0D7A3703D0AF74040"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[-118.4,33.93]}", "WKT": "POINT (-118.4 33.93)" }
+{ "Type": "Polygon", "Area": 1.3755215000294761E-8, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 6, "XMax": 8.760178, "XMin": 8.7599721, "YMax": 49.7103478, "YMin": 49.7102133, "Binary": hex("01030000000100000007000000B1BE26101B852140ED20C033EBDA4840C11DA8531E8521407694394CEADA4840BAF8DB9E20852140D5F89683EADA484098EF2AB5288521409557F844E8DA48400247020D3685214041F74086E9DA484014F131B32A8521408DE43CADECDA4840B1BE26101B852140ED20C033EBDA4840"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]]}", "WKT": "POLYGON ((8.7599721 49.7103028, 8.759997 49.7102752, 8.7600145 49.7102818, 8.7600762 49.7102133, 8.760178 49.7102516, 8.7600914 49.7103478, 8.7599721 49.7103028))" }
+{ "Type": "LineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 13, "XMax": -69.1972972, "XMin": -69.199136, "YMax": -12.5998133, "YMin": -12.6010968, "Binary": hex("01020000000D00000081BF4EA0BE4C51C0CE80C4C0843329C033DDEBA4BE4C51C0D388997D1E3329C064D7ACE9B04C51C05787927F1C3329C02347DF49B04C51C0B9C49107223329C0F4DCE79DAF4C51C054B76922223329C0184339D1AE4C51C042A89C40223329C048E17A14AE4C51C010DBCCD71B3329C007DB3E9AA04C51C0636996BA1A3329C0E9656F84A04C51C05ADD45F35B3329C04E9B711AA24C51C033373A42613329C06CE22E0CA24C51C066D24B31AC3329C05F454607A44C51C065170CAEB93329C07C8C03F9A34C51C0E475B3F5C23329C0"), "GeoJSON": "{\"type\":\"LineString\",\"coordinates\":[[-69.1991349,-12.6006222],[-69.199136,-12.599842],[-69.1982979,-12.5998268],[-69.1982598,-12.599869],[-69.1982188,-12.5998698],[-69.19817,-12.5998707],[-69.198125,-12.5998218],[-69.1973024,-12.5998133],[-69.1972972,-12.6003109],[-69.197394,-12.6003514],[-69.1973906,-12.6009231],[-69.1975115,-12.601026],[-69.1975081,-12.6010968]]}", "WKT": "LINESTRING (-69.1991349 -12.6006222, -69.199136 -12.599842, -69.1982979 -12.5998268, -69.1982598 -12.599869, -69.1982188 -12.5998698, -69.19817 -12.5998707, -69.198125 -12.5998218, -69.1973024 -12.5998133, -69.1972972 -12.6003109, -69.197394 -12.6003514, -69.1973906 -12.6009231, -69.1975115 -12.601026, -69.1975081 -12.6010968)" }
+{ "Type": "MultiPoint", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 0, "NPoints": 4, "XMax": 40.0, "XMin": 10.0, "YMax": 40.0, "YMin": 10.0, "Binary": hex("010400000004000000010100000000000000000024400000000000004440010100000000000000000044400000000000003E4001010000000000000000003440000000000000344001010000000000000000003E400000000000002440"), "GeoJSON": "{\"type\":\"MultiPoint\",\"coordinates\":[[10,40],[40,30],[20,20],[30,10]]}", "WKT": "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))" }
+{ "Type": "MultiLineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 7, "XMax": 40.0, "XMin": 10.0, "YMax": 40.0, "YMin": 10.0, "Binary": hex("010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003E400000000000003E40000000000000444000000000000034400000000000003E400000000000002440"), "GeoJSON": "{\"type\":\"MultiLineString\",\"coordinates\":[[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]}", "WKT": "MULTILINESTRING ((10 10, 20 20, 10 40), (40 40, 30 30, 40 20, 30 10))" }
+{ "Type": "MultiPolygon", "Area": 712.5, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 11, "XMax": 45.0, "XMin": 10.0, "YMax": 45.0, "YMin": 5.0, "Binary": hex("01060000000200000001030000000100000004000000000000000000444000000000000044400000000000003440000000000080464000000000008046400000000000003E4000000000000044400000000000004440010300000002000000060000000000000000003440000000000080414000000000000024400000000000003E40000000000000244000000000000024400000000000003E4000000000000014400000000000804640000000000000344000000000000034400000000000804140040000000000000000003E40000000000000344000000000000034400000000000002E40000000000000344000000000000039400000000000003E400000000000003440"), "GeoJSON": "{\"type\":\"MultiPolygon\",\"coordinates\":[[[[40,40],[20,45],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]]}", "WKT": "MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)), ((20 35, 10 30, 10 10, 30 5, 45 20, 20 35), (30 20, 20 15, 20 25, 30 20)))" }
+{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 0, "NPoints": 1, "XMax": -71.1043443253471, "XMin": -71.1043443253471, "YMax": 42.3150676015829, "YMin": 42.3150676015829, "Binary": hex("0101000000E538D293ADC651C0F3699A2254284540"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[-71.1043443253471,42.3150676015829]}", "WKT": "POINT (-71.1043443253471 42.3150676015829)" }
+{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 3, "Dimension": 0, "NPoints": 1, "XMax": 1.0, "XMin": 1.0, "YMax": 2.0, "YMin": 2.0, "Binary": hex("0101000080000000000000F03F00000000000000400000000000000840"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[1,2,3]}", "WKT": "POINT Z(1 2 3)" }
+{ "Type": "Polygon", "Area": 928.625, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 4, "XMax": 743265.625, "XMin": 743238.0, "YMax": 2967450.0, "YMin": 2967416.0, "Binary": hex("01030000000100000005000000000000008CAE264100000000BCA34641000000008CAE264100000000CDA3464100000000C2AE264100000000CDA3464100000040C3AE264100000000BCA34641000000008CAE264100000000BCA34641"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[743238,2967416],[743238,2967450],[743265,2967450],[743265.625,2967416],[743238,2967416]]]}", "WKT": "POLYGON ((743238 2967416, 743238 2967450, 743265 2967450, 743265.625 2967416, 743238 2967416))" }
+{ "Type": "LineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 2, "XMax": -113.98, "XMin": -113.981, "YMax": 39.198, "YMin": 39.195, "Binary": hex("0102000000020000001F85EB51B87E5CC0D34D621058994340105839B4C87E5CC0295C8FC2F5984340"), "GeoJSON": "{\"type\":\"LineString\",\"coordinates\":[[-113.98,39.198],[-113.981,39.195]]}", "WKT": "LINESTRING (-113.98 39.198, -113.981 39.195)" }
+{ "Type": "LineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 3, "XMax": 7.0, "XMin": 1.0, "YMax": 8.0, "YMin": 2.0, "Binary": hex("010200000003000000000000000000F03F0000000000000040000000000000104000000000000014400000000000001C400000000000002040"), "GeoJSON": "{\"type\":\"LineString\",\"coordinates\":[[1,2],[4,5],[7,8]]}", "WKT": "LINESTRING (1 2, 4 5, 7 8)" }
+{ "Type": "Polygon", "Area": 1.0, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 4, "XMax": 11.0, "XMin": 10.0, "YMax": 11.0, "YMin": 10.0, "Binary": hex("010300000001000000050000000000000000002440000000000000244000000000000024400000000000002640000000000000264000000000000026400000000000002640000000000000244000000000000024400000000000002440"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[10,10],[10,11],[11,11],[11,10],[10,10]]]}", "WKT": "POLYGON ((10 10, 10 11, 11 11, 11 10, 10 10))" }
+{ "Type": "Polygon", "Area": 675.0, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 7, "XMax": 45.0, "XMin": 10.0, "YMax": 45.0, "YMin": 10.0, "Binary": hex("0103000000020000000500000000000000008041400000000000002440000000000080464000000000008046400000000000002E40000000000000444000000000000024400000000000003440000000000080414000000000000024400400000000000000000034400000000000003E40000000000080414000000000008041400000000000003E40000000000000344000000000000034400000000000003E40"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]}", "WKT": "POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))" }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.05.adm
index 0815bfa..5ea6fd5 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.05.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.05.adm
@@ -1,4 +1,3 @@
-{ "X": -118.4, "Y": 33.93, "Z": 0.0, "M": NaN }
-{ "X": -71.1043443253471, "Y": 42.3150676015829, "Z": 0.0, "M": NaN }
-{ "X": 1.0, "Y": 2.0, "Z": 3.0, "M": NaN }
-{ "X": 1.0, "Y": 2.0, "Z": 3.0, "M": 4.0 }
\ No newline at end of file
+{ "X": -118.4, "Y": 33.93, "Z": NaN, "M": NaN }
+{ "X": -71.1043443253471, "Y": 42.3150676015829, "Z": NaN, "M": NaN }
+{ "X": 1.0, "Y": 2.0, "Z": 3.0, "M": NaN }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.06.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.06.adm
index 044e31a..39ced5e 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.06.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.06.adm
@@ -1,4 +1,4 @@
-{ "NumInteriorRings": 0, "ExteriorRing": {"type":"LineString","coordinates":[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "NumInteriorRings": 0, "ExteriorRing": {"type":"LineString","coordinates":[[743238,2967416],[743265.625,2967416],[743265,2967450],[743238,2967450],[743238,2967416]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "NumInteriorRings": 0, "ExteriorRing": {"type":"LineString","coordinates":[[10,10],[11,10],[11,11],[10,11],[10,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "NumInteriorRings": 1, "ExteriorRing": {"type":"LineString","coordinates":[[35,10],[45,45],[15,40],[10,20],[35,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
\ No newline at end of file
+{ "NumInteriorRings": 0, "ExteriorRing": {"type":"LineString","coordinates":[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]} }
+{ "NumInteriorRings": 0, "ExteriorRing": {"type":"LineString","coordinates":[[743238,2967416],[743238,2967450],[743265,2967450],[743265.625,2967416],[743238,2967416]]} }
+{ "NumInteriorRings": 0, "ExteriorRing": {"type":"LineString","coordinates":[[10,10],[10,11],[11,11],[11,10],[10,10]]} }
+{ "NumInteriorRings": 1, "ExteriorRing": {"type":"LineString","coordinates":[[35,10],[45,45],[15,40],[10,20],[35,10]]} }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.07.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.07.adm
index 7967722..3a026d5 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.07.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.07.adm
@@ -1,4 +1,4 @@
-{ "Length": 0.004058119099397876, "Boundary": {"type":"MultiPoint","coordinates":[[-69.1991349,-12.6006222],[-69.1975081,-12.6010968]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Length": 78.9292222699217, "Boundary": {"type":"MultiPoint","coordinates":[[10,10],[10,40],[40,40],[30,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Length": 0.0031622776601655037, "Boundary": {"type":"MultiPoint","coordinates":[[-113.98,39.198],[-113.981,39.195]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Length": 8.48528137423857, "Boundary": {"type":"MultiPoint","coordinates":[[1,2],[7,8]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
+{ "Length": 0.004058119099397876, "Boundary": {"type":"MultiPoint","coordinates":[[-69.1991349,-12.6006222],[-69.1975081,-12.6010968]]} }
+{ "Length": 78.9292222699217, "Boundary": {"type":"MultiPoint","coordinates":[[10,10],[10,40],[30,10],[40,40]]} }
+{ "Length": 0.0031622776601655037, "Boundary": {"type":"MultiPoint","coordinates":[[-113.98,39.198],[-113.981,39.195]]} }
+{ "Length": 8.48528137423857, "Boundary": {"type":"MultiPoint","coordinates":[[1,2],[7,8]]} }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.08.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.08.adm
index c0d2341..1987970 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.08.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.08.adm
@@ -1,3 +1,3 @@
-{ "PointN": {"type":"Point","coordinates":[-69.199136,-12.599842],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "StartPoint": {"type":"Point","coordinates":[-69.1991349,-12.6006222],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Envelope": {"type":"Polygon","coordinates":[[[-69.199136,-12.6010968],[-69.1972972,-12.6010968],[-69.1972972,-12.5998133],[-69.199136,-12.5998133],[-69.199136,-12.6010968]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "PointN": {"type":"Point","coordinates":[-69.199136,-12.599842],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "StartPoint": {"type":"Point","coordinates":[-113.98,39.198],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Envelope": {"type":"Polygon","coordinates":[[[-113.981,39.195],[-113.98,39.195],[-113.98,39.198],[-113.981,39.198],[-113.981,39.195]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "PointN": {"type":"Point","coordinates":[-69.199136,-12.599842],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "StartPoint": {"type":"Point","coordinates":[1,2],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Envelope": {"type":"Polygon","coordinates":[[[1,2],[7,2],[7,8],[1,8],[1,2]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
\ No newline at end of file
+{ "PointN": {"type":"Point","coordinates":[-69.199136,-12.599842]}, "StartPoint": {"type":"Point","coordinates":[-69.1991349,-12.6006222]}, "Envelope": {"type":"Polygon","coordinates":[[[-69.199136,-12.6010968],[-69.199136,-12.5998133],[-69.1972972,-12.5998133],[-69.1972972,-12.6010968],[-69.199136,-12.6010968]]]} }
+{ "PointN": {"type":"Point","coordinates":[-69.199136,-12.599842]}, "StartPoint": {"type":"Point","coordinates":[-113.98,39.198]}, "Envelope": {"type":"Polygon","coordinates":[[[-113.981,39.195],[-113.981,39.198],[-113.98,39.198],[-113.98,39.195],[-113.981,39.195]]]} }
+{ "PointN": {"type":"Point","coordinates":[-69.199136,-12.599842]}, "StartPoint": {"type":"Point","coordinates":[1,2]}, "Envelope": {"type":"Polygon","coordinates":[[[1,2],[1,8],[7,8],[7,2],[1,2]]]} }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.09.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.09.adm
index 893f2dc..8a42de2 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.09.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.09.adm
@@ -7,7 +7,6 @@
 { "IsClosed": true, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
 { "IsClosed": true, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
 { "IsClosed": true, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
-{ "IsClosed": true, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
 { "IsClosed": false, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
 { "IsClosed": false, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
 { "IsClosed": true, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.20.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.20.adm
index ea141c9..bebded7 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.20.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.20.adm
@@ -1,14 +1,13 @@
-{ "Geometries": { "id": 123, "myGeometry": {"type":"Point","coordinates":[-118.4,33.93],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 124, "myGeometry": {"type":"Polygon","coordinates":[[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 126, "myGeometry": {"type":"LineString","coordinates":[[-69.1991349,-12.6006222],[-69.199136,-12.599842],[-69.1982979,-12.5998268],[-69.1982598,-12.599869],[-69.1982188,-12.5998698],[-69.19817,-12.5998707],[-69.198125,-12.5998218],[-69.1973024,-12.5998133],[-69.1972972,-12.6003109],[-69.197394,-12.6003514],[-69.1973906,-12.6009231],[-69.1975115,-12.601026],[-69.1975081,-12.6010968]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 127, "myGeometry": {"type":"MultiPoint","coordinates":[[10,40],[40,30],[20,20],[30,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 128, "myGeometry": {"type":"MultiLineString","coordinates":[[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]],"crs":null} } }
-{ "Geometries": { "id": 129, "myGeometry": {"type":"MultiPolygon","coordinates":[[[[40,40],[20,45],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]],"crs":null} } }
-{ "Geometries": { "id": 130, "myGeometry": {"type":"Point","coordinates":[-71.1043443253471,42.3150676015829],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 131, "myGeometry": {"type":"Point","coordinates":[1,2,3],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 132, "myGeometry": {"type":"Point","coordinates":[1,2,3,4],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 133, "myGeometry": {"type":"Polygon","coordinates":[[[743238,2967416],[743265.625,2967416],[743265,2967450],[743238,2967450],[743238,2967416]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 134, "myGeometry": {"type":"LineString","coordinates":[[-113.98,39.198],[-113.981,39.195]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 135, "myGeometry": {"type":"LineString","coordinates":[[1,2],[4,5],[7,8]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 136, "myGeometry": {"type":"Polygon","coordinates":[[[10,10],[11,10],[11,11],[10,11],[10,10]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 137, "myGeometry": {"type":"Polygon","coordinates":[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
\ No newline at end of file
+{ "Geometries": { "id": 123, "myGeometry": {"type":"Point","coordinates":[-118.4,33.93]} } }
+{ "Geometries": { "id": 124, "myGeometry": {"type":"Polygon","coordinates":[[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]]} } }
+{ "Geometries": { "id": 126, "myGeometry": {"type":"LineString","coordinates":[[-69.1991349,-12.6006222],[-69.199136,-12.599842],[-69.1982979,-12.5998268],[-69.1982598,-12.599869],[-69.1982188,-12.5998698],[-69.19817,-12.5998707],[-69.198125,-12.5998218],[-69.1973024,-12.5998133],[-69.1972972,-12.6003109],[-69.197394,-12.6003514],[-69.1973906,-12.6009231],[-69.1975115,-12.601026],[-69.1975081,-12.6010968]]} } }
+{ "Geometries": { "id": 127, "myGeometry": {"type":"MultiPoint","coordinates":[[10,40],[40,30],[20,20],[30,10]]} } }
+{ "Geometries": { "id": 128, "myGeometry": {"type":"MultiLineString","coordinates":[[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]} } }
+{ "Geometries": { "id": 129, "myGeometry": {"type":"MultiPolygon","coordinates":[[[[40,40],[20,45],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]]} } }
+{ "Geometries": { "id": 130, "myGeometry": {"type":"Point","coordinates":[-71.1043443253471,42.3150676015829]} } }
+{ "Geometries": { "id": 131, "myGeometry": {"type":"Point","coordinates":[1,2,3]} } }
+{ "Geometries": { "id": 133, "myGeometry": {"type":"Polygon","coordinates":[[[743238,2967416],[743238,2967450],[743265,2967450],[743265.625,2967416],[743238,2967416]]]} } }
+{ "Geometries": { "id": 134, "myGeometry": {"type":"LineString","coordinates":[[-113.98,39.198],[-113.981,39.195]]} } }
+{ "Geometries": { "id": 135, "myGeometry": {"type":"LineString","coordinates":[[1,2],[4,5],[7,8]]} } }
+{ "Geometries": { "id": 136, "myGeometry": {"type":"Polygon","coordinates":[[[10,10],[10,11],[11,11],[11,10],[10,10]]]} } }
+{ "Geometries": { "id": 137, "myGeometry": {"type":"Polygon","coordinates":[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]} } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.21.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.21.adm
index 2ceb6e0..7628d62 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.21.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.21.adm
@@ -1,3 +1,2 @@
 131
-132
 135
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.10.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.10.adm
index a1e16dd..bfa005e 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.10.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.10.adm
@@ -1 +1 @@
-{"type":"Polygon","coordinates":[[[-7,4.2],[-7.1,5],[-7.1,4.3],[-7,4.2]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}
\ No newline at end of file
+{"type":"Polygon","coordinates":[[[-7,4.2],[-7.1,5],[-7.1,4.3],[-7,4.2]]]}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.15.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.15.adm
new file mode 100644
index 0000000..9243a76
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.15.adm
@@ -0,0 +1,14 @@
+NaN
+NaN
+0.0
+0.0
+3.0
+1.0
+-1.0
+-3.0
+1.0
+-1.0
+2.0
+0.0
+0.0
+-1.0
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.04.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.04.adm
index e353336..57d5885 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.04.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.04.adm
@@ -1,10 +1,10 @@
-{ "Union": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"Point","coordinates":[0,2],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Union": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[0,2],[1,2],[2,2]],[[2,2],[3,3]],[[3,3],[4,2],[5,2]],[[3,3],[4,4],[5,5],[6,6]]],"crs":null}, "Intersection": {"type":"LineString","coordinates":[[2,2],[3,3]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"MultiLineString","coordinates":[[[0,2],[1,2],[2,2],[1,1]],[[5,2],[4,2],[3,3],[4,4],[5,5],[6,6]]],"crs":null} }
-{ "Union": {"type":"LineString","coordinates":[[0,0],[0,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"Point","coordinates":[0,0],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"LineString","coordinates":[[0,0],[0,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Union": {"type":"LineString","coordinates":[[1,1],[-1,-1],[2,3.5],[1,3],[1,2],[2,1]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"Point","coordinates":[-1,-1],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"LineString","coordinates":[[1,1],[-1,-1],[2,3.5],[1,3],[1,2],[2,1]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Union": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[0,2],[1,2],[2,2]],[[2,2],[3,2],[4,2],[5,2]],[[2,2],[3,3],[4,4],[5,5],[6,6]]],"crs":null}, "Intersection": {"type":"Point","coordinates":[2,2],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[0,2],[1,2],[2,2]],[[2,2],[3,2],[4,2],[5,2]],[[2,2],[3,3],[4,4],[5,5],[6,6]]],"crs":null} }
-{ "Union": {"type":"LineString","coordinates":[[1,1],[2,2],[3,3],[4,4],[5,5],[6,6]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"MultiPolygon","coordinates":[],"crs":null}, "SymDifference": {"type":"LineString","coordinates":[[1,1],[2,2],[3,3],[4,4],[5,5],[6,6]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Union": {"type":"LineString","coordinates":[[0,0],[5,5],[10,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"LineString","coordinates":[[0,0],[5,5],[10,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"MultiLineString","coordinates":[],"crs":null} }
-{ "Union": {"type":"MultiLineString","coordinates":[[[1,2],[3,4]],[[5,6],[7,8]]],"crs":null}, "Intersection": {"type":"MultiPolygon","coordinates":[],"crs":null}, "SymDifference": {"type":"MultiLineString","coordinates":[[[1,2],[3,4]],[[5,6],[7,8]]],"crs":null} }
-{ "Union": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"Point","coordinates":[0,2],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Union": {"type":"Polygon","coordinates":[[[0,0],[1,0],[1,1],[0,1],[0,0]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"Polygon","coordinates":[[[0.25,0.25],[0.5,0.25],[0.5,0.5],[0.25,0.5],[0.25,0.25]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"Polygon","coordinates":[[[0,0],[1,0],[1,1],[0,1],[0,0]],[[0.25,0.25],[0.25,0.5],[0.5,0.5],[0.5,0.25],[0.25,0.25]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
+{ "Union": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]]}, "Intersection": {"type":"Point","coordinates":[0,2]}, "SymDifference": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]]} }
+{ "Union": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[2,2],[3,3]],[[3,3],[4,4],[5,5],[6,6]],[[0,2],[1,2],[2,2]],[[3,3],[4,2],[5,2]]]}, "Intersection": {"type":"LineString","coordinates":[[2,2],[3,3]]}, "SymDifference": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[3,3],[4,4],[5,5],[6,6]],[[0,2],[1,2],[2,2]],[[3,3],[4,2],[5,2]]]} }
+{ "Union": {"type":"LineString","coordinates":[[0,0],[0,2]]}, "Intersection": {"type":"Point","coordinates":[0,0]}, "SymDifference": {"type":"LineString","coordinates":[[0,0],[0,2]]} }
+{ "Union": {"type":"MultiLineString","coordinates":[[[1,1],[-1,-1],[1,2]],[[1,2],[2,3.5],[1,3],[1,2]],[[1,2],[2,1]]]}, "Intersection": {"type":"Point","coordinates":[-1,-1]}, "SymDifference": {"type":"MultiLineString","coordinates":[[[1,1],[-1,-1],[1,2]],[[1,2],[2,3.5],[1,3],[1,2]],[[1,2],[2,1]]]} }
+{ "Union": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[2,2],[3,3],[4,4],[5,5],[6,6]],[[0,2],[1,2],[2,2]],[[2,2],[3,2],[4,2],[5,2]]]}, "Intersection": {"type":"Point","coordinates":[2,2]}, "SymDifference": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[2,2],[3,3],[4,4],[5,5],[6,6]],[[0,2],[1,2],[2,2]],[[2,2],[3,2],[4,2],[5,2]]]} }
+{ "Union": {"type":"GeometryCollection","geometries":[{"type":"Point","coordinates":[0,0]},{"type":"LineString","coordinates":[[1,1],[2,2],[3,3],[4,4],[5,5],[6,6]]}]}, "Intersection": , "SymDifference": {"type":"GeometryCollection","geometries":[{"type":"Point","coordinates":[0,0]},{"type":"LineString","coordinates":[[1,1],[2,2],[3,3],[4,4],[5,5],[6,6]]}]} }
+{ "Union": {"type":"MultiLineString","coordinates":[[[0,0],[5,5]],[[5,5],[10,10]]]}, "Intersection": {"type":"MultiLineString","coordinates":[[[0,0],[5,5]],[[5,5],[10,10]]]}, "SymDifference": {"type":"LineString","coordinates":[]} }
+{ "Union": {"type":"MultiLineString","coordinates":[[[1,2],[3,4]],[[5,6],[7,8]]]}, "Intersection": {"type":"LineString","coordinates":[]}, "SymDifference": {"type":"MultiLineString","coordinates":[[[1,2],[3,4]],[[5,6],[7,8]]]} }
+{ "Union": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]]}, "Intersection": {"type":"Point","coordinates":[0,2]}, "SymDifference": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]]} }
+{ "Union": {"type":"Polygon","coordinates":[[[0,0],[0,1],[1,1],[1,0],[0,0]]]}, "Intersection": {"type":"Polygon","coordinates":[[[0.25,0.25],[0.25,0.5],[0.5,0.5],[0.5,0.25],[0.25,0.25]]]}, "SymDifference": {"type":"Polygon","coordinates":[[[0,0],[0,1],[1,1],[1,0],[0,0]],[[0.25,0.25],[0.5,0.25],[0.5,0.5],[0.25,0.5],[0.25,0.25]]]} }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.05.adm
index 5123e09..d7e9872 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.05.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.05.adm
@@ -1 +1 @@
-{"type":"MultiPoint","coordinates":[[1,1],[1,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}
\ No newline at end of file
+{"type":"MultiPoint","coordinates":[[1,1],[1,2]]}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.03.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.03.adm
new file mode 100644
index 0000000..07e109a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.03.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 6, "name": "fsdxv😀", "age": 17 } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.04.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.04.plan
new file mode 100644
index 0000000..16bae90
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.04.plan
@@ -0,0 +1,38 @@
+distribute result [$$19] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "fsdxv😀"), lt($$20, "fsdxv😁"))) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  unnest-map [$$21, $$ds1] <- index-search("ds1", 0, "Default", "test", "ds1", false, false, 1, $$25, 1, $$25, true, true, true) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- BTREE_SEARCH  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      order (ASC, $$25) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- STABLE_SORT [$$25(ASC)]  |PARTITIONED|
+                        exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$25]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              unnest-map [$$24, $$25] <- index-search("ds1_name", 0, "Default", "test", "ds1", false, false, 1, $$22, 1, $$23, true, false, false) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                              -- BTREE_SEARCH  |PARTITIONED|
+                                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  assign [$$22, $$23] <- ["fsdxv😀", "fsdxv😁"] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                  -- ASSIGN  |PARTITIONED|
+                                    empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.05.adm
new file mode 100644
index 0000000..45d43d5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.05.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 4, "name": "asdgdh👩‍👩‍👧dfsd", "age": 23 } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.06.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.06.plan
new file mode 100644
index 0000000..63fb5a2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.06.plan
@@ -0,0 +1,38 @@
+distribute result [$$19] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "asdgdh👩‍👩‍👧"), lt($$20, "asdgdh👩‍👩‍👨"))) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  unnest-map [$$21, $$ds1] <- index-search("ds1", 0, "Default", "test", "ds1", false, false, 1, $$25, 1, $$25, true, true, true) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- BTREE_SEARCH  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      order (ASC, $$25) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- STABLE_SORT [$$25(ASC)]  |PARTITIONED|
+                        exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$25]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              unnest-map [$$24, $$25] <- index-search("ds1_name", 0, "Default", "test", "ds1", false, false, 1, $$22, 1, $$23, true, false, false) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                              -- BTREE_SEARCH  |PARTITIONED|
+                                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  assign [$$22, $$23] <- ["asdgdh👩‍👩‍👧", "asdgdh👩‍👩‍👨"] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                  -- ASSIGN  |PARTITIONED|
+                                    empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.07.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.07.adm
new file mode 100644
index 0000000..e4ac10b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.07.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 5, "name": "sxvciis", "age": 13 } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.08.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.08.plan
new file mode 100644
index 0000000..5c149d0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.08.plan
@@ -0,0 +1,34 @@
+distribute result [$$16] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        select (eq($$ds1.getField(1), "sxvciis")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              unnest-map [$$17, $$ds1] <- index-search("ds1", 0, "Default", "test", "ds1", false, false, 1, $$22, 1, $$22, true, true, true) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- BTREE_SEARCH  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  order (ASC, $$22) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- STABLE_SORT [$$22(ASC)]  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      project ([$$22]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- STREAM_PROJECT  |PARTITIONED|
+                        exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          unnest-map [$$21, $$22] <- index-search("ds1_name", 0, "Default", "test", "ds1", false, false, 1, $$19, 1, $$20, true, true, true) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                          -- BTREE_SEARCH  |PARTITIONED|
+                            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              assign [$$19, $$20] <- ["sxvciis", "sxvciis"] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                              -- ASSIGN  |PARTITIONED|
+                                empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.09.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.09.adm
new file mode 100644
index 0000000..4ca49aa
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.09.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 1, "name": "John", "age": 21 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.10.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.10.plan
new file mode 100644
index 0000000..04efe1d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.10.plan
@@ -0,0 +1,38 @@
+distribute result [$$19] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "Joh"), lt($$20, "Joi"))) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  unnest-map [$$21, $$ds1] <- index-search("ds1", 0, "Default", "test", "ds1", false, false, 1, $$25, 1, $$25, true, true, true) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- BTREE_SEARCH  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      order (ASC, $$25) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- STABLE_SORT [$$25(ASC)]  |PARTITIONED|
+                        exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$25]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              unnest-map [$$24, $$25] <- index-search("ds1_name", 0, "Default", "test", "ds1", false, false, 1, $$22, 1, $$23, true, false, false) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                              -- BTREE_SEARCH  |PARTITIONED|
+                                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  assign [$$22, $$23] <- ["Joh", "Joi"] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                  -- ASSIGN  |PARTITIONED|
+                                    empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.11.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.11.adm
new file mode 100644
index 0000000..202bfb1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.11.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 9, "name": "sxvfjshfjisciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.12.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.12.plan
new file mode 100644
index 0000000..ffb44e9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.12.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        select (like($$ds1.getField(1), "s%vfjs__jiscii%")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.13.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.13.adm
new file mode 100644
index 0000000..6bbea7c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.13.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 5, "name": "sxvciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.14.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.14.plan
new file mode 100644
index 0000000..af4d893
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.14.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        select (like($$ds1.getField(1), "sxvc_is")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.15.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.15.adm
new file mode 100644
index 0000000..6bbea7c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.15.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 5, "name": "sxvciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.16.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.16.plan
new file mode 100644
index 0000000..4f75093
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.16.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        select (like($$ds1.getField(1), "sxvc_i%")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.17.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.17.adm
new file mode 100644
index 0000000..202bfb1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.17.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 9, "name": "sxvfjshfjisciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.18.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.18.plan
new file mode 100644
index 0000000..6f6047f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.18.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        select (like($$ds1.getField(1), "sxvfjs%jisci%")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.19.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.19.adm
new file mode 100644
index 0000000..3d747c3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.19.adm
@@ -0,0 +1,2 @@
+{ "ds1": { "id": 1, "name": "John", "age": 21 } }
+{ "ds1": { "id": 8, "name": "Join", "age": 21 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.20.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.20.plan
new file mode 100644
index 0000000..d1db279
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.20.plan
@@ -0,0 +1,38 @@
+distribute result [$$19] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "J"), lt($$20, "K"))) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  unnest-map [$$21, $$ds1] <- index-search("ds1", 0, "Default", "test", "ds1", false, false, 1, $$25, 1, $$25, true, true, true) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- BTREE_SEARCH  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      order (ASC, $$25) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- STABLE_SORT [$$25(ASC)]  |PARTITIONED|
+                        exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$25]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              unnest-map [$$24, $$25] <- index-search("ds1_name", 0, "Default", "test", "ds1", false, false, 1, $$22, 1, $$23, true, false, false) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                              -- BTREE_SEARCH  |PARTITIONED|
+                                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  assign [$$22, $$23] <- ["J", "K"] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                  -- ASSIGN  |PARTITIONED|
+                                    empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.21.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.21.adm
new file mode 100644
index 0000000..be3fef3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.21.adm
@@ -0,0 +1,2 @@
+{ "ds1": { "id": 9, "name": "sxvfjshfjisciis", "age": 13 } }
+{ "ds1": { "id": 5, "name": "sxvciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.22.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.22.plan
new file mode 100644
index 0000000..b577614
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/index-selection/like-expression/like-expression.22.plan
@@ -0,0 +1,38 @@
+distribute result [$$19] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "sx"), lt($$20, "sy"))) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  unnest-map [$$21, $$ds1] <- index-search("ds1", 0, "Default", "test", "ds1", false, false, 1, $$25, 1, $$25, true, true, true) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- BTREE_SEARCH  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      order (ASC, $$25) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- STABLE_SORT [$$25(ASC)]  |PARTITIONED|
+                        exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$25]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              unnest-map [$$24, $$25] <- index-search("ds1_name", 0, "Default", "test", "ds1", false, false, 1, $$22, 1, $$23, true, false, false) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                              -- BTREE_SEARCH  |PARTITIONED|
+                                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  assign [$$22, $$23] <- ["sx", "sy"] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                  -- ASSIGN  |PARTITIONED|
+                                    empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.3.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.3.adm
new file mode 100644
index 0000000..2c44767
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.3.adm
@@ -0,0 +1,2 @@
+{ "Category 1": 1, "Category 2": 1, "Category 3": 0, "Category 4": 0, "Category 5": 0, "Category 6": 1, "Category 7": 0, "Category 8": 0, "Category 9": 1, "Category 10": 1, "Category 11": 0, "Category 12": 1, "Category 13": 1, "Category 14": 1, "Category 15": 0, "Category 16": 1, "Category 17": 1, "Category 18": 1, "Category 19": 1, "Category 20": 1, "Category 21": 1, "Category 22": 1, "Category 23": 0, "Category 24": 0, "Category 25": 0, "Category 26": 0, "itemid": 10, "description": "ABC" }
+{ "Category 1": 1, "Category 2": 1, "Category 3": 1, "Category 4": 0, "Category 5": 1, "Category 6": 0, "Category 7": 1, "Category 8": 1, "Category 9": 1, "Category 10": 1, "Category 11": 0, "Category 12": 0, "Category 13": 0, "Category 14": 0, "Category 15": 0, "Category 16": 0, "Category 17": 0, "Category 18": 0, "Category 19": 1, "Category 20": 1, "Category 21": 1, "Category 22": 1, "Category 23": 1, "Category 24": 0, "Category 25": 0, "Category 26": 0, "itemid": 12, "description": "XYZ" }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/storage-size/datasource-function/collection-exists/result.040.regexjson b/asterixdb/asterix-app/src/test/resources/runtimets/results/storage-size/datasource-function/collection-exists/result.040.regexjson
new file mode 100644
index 0000000..1e9e352
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/storage-size/datasource-function/collection-exists/result.040.regexjson
@@ -0,0 +1,3 @@
+{
+    "size": "R{.*}"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/storage-size/datasource-function/external-collection/result.002.ignore b/asterixdb/asterix-app/src/test/resources/runtimets/results/storage-size/datasource-function/external-collection/result.002.ignore
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/storage-size/datasource-function/external-collection/result.002.ignore
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/storage-size/datasource-function/index-exists/result.040.regexjson b/asterixdb/asterix-app/src/test/resources/runtimets/results/storage-size/datasource-function/index-exists/result.040.regexjson
new file mode 100644
index 0000000..1e9e352
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/storage-size/datasource-function/index-exists/result.040.regexjson
@@ -0,0 +1,3 @@
+{
+    "size": "R{.*}"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/storage-size/datasource-function/sanity/result.040.regexjson b/asterixdb/asterix-app/src/test/resources/runtimets/results/storage-size/datasource-function/sanity/result.040.regexjson
new file mode 100644
index 0000000..5cfd541
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/storage-size/datasource-function/sanity/result.040.regexjson
@@ -0,0 +1 @@
+{"collectionAndIndexesLargerThanCollectionOnly":true,"collectionOnlyLargerThanIndexOnly":true}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.042.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.042.plan
index 7fe9bcc..6428718 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.042.plan
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.042.plan
@@ -1,72 +1,72 @@
-distribute result [$$70] [cardinality: 6.0, op-cost: 0.0, total-cost: 28.6]
+distribute result [$$70] [cardinality: 6.0, op-cost: 0.0, total-cost: 23.6]
 -- DISTRIBUTE_RESULT  |UNPARTITIONED|
-  exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 28.6]
+  exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 23.6]
   -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
-    aggregate [$$70] <- [agg-sql-sum($$76)] [cardinality: 6.0, op-cost: 0.0, total-cost: 28.6]
+    aggregate [$$70] <- [agg-sql-sum($$76)] [cardinality: 6.0, op-cost: 0.0, total-cost: 23.6]
     -- AGGREGATE  |UNPARTITIONED|
-      exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 28.6]
+      exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 23.6]
       -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
-        aggregate [$$76] <- [agg-sql-count(1)] [cardinality: 6.0, op-cost: 0.0, total-cost: 28.6]
+        aggregate [$$76] <- [agg-sql-count(1)] [cardinality: 6.0, op-cost: 0.0, total-cost: 23.6]
         -- AGGREGATE  |PARTITIONED|
-          exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 28.6]
+          exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 23.6]
           -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-            join (or(eq($$71, "7"), neq($$69, 0))) [cardinality: 6.0, op-cost: 12.6, total-cost: 28.6]
+            join (or(eq($$71, "7"), neq($$69, 0))) [cardinality: 6.0, op-cost: 12.6, total-cost: 23.6]
             -- NESTED_LOOP  |PARTITIONED|
-              exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+              exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
               -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                project ([$$71]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                project ([$$71]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                 -- STREAM_PROJECT  |PARTITIONED|
-                  assign [$$71] <- [$$p.getField("arrayOrObject").getField("text")] [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                  assign [$$71] <- [$$p.getField("arrayOrObject").getField("text")] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                   -- ASSIGN  |PARTITIONED|
-                    project ([$$p]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                    project ([$$p]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                     -- STREAM_PROJECT  |PARTITIONED|
-                      assign [$$p] <- [$$b2] [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                      assign [$$p] <- [$$b2] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                       -- ASSIGN  |PARTITIONED|
-                        exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                        exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                          replicate [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                          replicate [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                           -- REPLICATE  |PARTITIONED|
-                            exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+                            exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                              project ([$$b2]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                              project ([$$b2]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                               -- STREAM_PROJECT  |PARTITIONED|
-                                exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+                                exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
                                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                  data-scan []<-[$$68, $$b2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) [cardinality: 6.0, op-cost: 6.0, total-cost: 6.0]
+                                  data-scan []<-[$$68, $$b2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) [cardinality: 1.0, op-cost: 1.0, total-cost: 1.0]
                                   -- DATASOURCE_SCAN  |PARTITIONED|
                                     exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
                                     -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                       empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
                                       -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-              exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 10.0]
+              exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
               -- BROADCAST_EXCHANGE  |PARTITIONED|
-                aggregate [$$69] <- [agg-sum($$75)] [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
+                aggregate [$$69] <- [agg-sum($$75)] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                 -- AGGREGATE  |UNPARTITIONED|
-                  exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 10.0]
+                  exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
                   -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
-                    aggregate [$$75] <- [agg-count(1)] [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
+                    aggregate [$$75] <- [agg-count(1)] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                     -- AGGREGATE  |PARTITIONED|
-                      select (eq($$ao.getField("text"), "1")) [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
+                      select (eq($$ao.getField("text"), "1")) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                       -- STREAM_SELECT  |PARTITIONED|
-                        project ([$$ao]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                        project ([$$ao]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                         -- STREAM_PROJECT  |PARTITIONED|
-                          unnest $$ao <- scan-collection($$72) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                          unnest $$ao <- scan-collection($$72) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                           -- UNNEST  |PARTITIONED|
-                            project ([$$72]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                            project ([$$72]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                             -- STREAM_PROJECT  |PARTITIONED|
-                              assign [$$72] <- [$$b2.getField("arrayOrObject")] [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                              assign [$$72] <- [$$b2.getField("arrayOrObject")] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                               -- ASSIGN  |PARTITIONED|
-                                exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+                                exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
                                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                  replicate [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                                  replicate [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                                   -- REPLICATE  |PARTITIONED|
-                                    exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+                                    exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
                                     -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                      project ([$$b2]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                                      project ([$$b2]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                                       -- STREAM_PROJECT  |PARTITIONED|
-                                        exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+                                        exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
                                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                          data-scan []<-[$$68, $$b2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) [cardinality: 6.0, op-cost: 6.0, total-cost: 6.0]
+                                          data-scan []<-[$$68, $$b2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) [cardinality: 1.0, op-cost: 1.0, total-cost: 1.0]
                                           -- DATASOURCE_SCAN  |PARTITIONED|
                                             exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
                                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.062.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.062.plan
index 70f874e..345fc12 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.062.plan
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.062.plan
@@ -1,8 +1,8 @@
-distribute result [$$63] [cardinality: 2.1, op-cost: 0.0, total-cost: 20.41]
+distribute result [$$63] [cardinality: 2.1, op-cost: 0.0, total-cost: 15.41]
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  exchange [cardinality: 2.1, op-cost: 0.0, total-cost: 20.41]
+  exchange [cardinality: 2.1, op-cost: 0.0, total-cost: 15.41]
   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-    join (true) [cardinality: 2.1, op-cost: 4.41, total-cost: 20.41]
+    join (true) [cardinality: 2.1, op-cost: 4.41, total-cost: 15.41]
     -- NESTED_LOOP  |PARTITIONED|
       exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
@@ -10,59 +10,59 @@
         -- STREAM_PROJECT  |PARTITIONED|
           select (eq($$p.getField("arrayOrObject").getField("text"), "7")) [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
           -- STREAM_SELECT  |PARTITIONED|
-            project ([$$p]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+            project ([$$p]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
             -- STREAM_PROJECT  |PARTITIONED|
-              assign [$$p] <- [$$p2] [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+              assign [$$p] <- [$$p2] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
               -- ASSIGN  |PARTITIONED|
-                exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                  replicate [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                  replicate [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                   -- REPLICATE  |PARTITIONED|
-                    exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+                    exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
                     -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                      project ([$$p2]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                      project ([$$p2]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                       -- STREAM_PROJECT  |PARTITIONED|
-                        exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+                        exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                          data-scan []<-[$$62, $$p2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) range-filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) [cardinality: 6.0, op-cost: 6.0, total-cost: 6.0]
+                          data-scan []<-[$$62, $$p2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) range-filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) [cardinality: 1.0, op-cost: 1.0, total-cost: 1.0]
                           -- DATASOURCE_SCAN  |PARTITIONED|
                             exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                               empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
                               -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
-      exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 10.0]
+      exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
       -- BROADCAST_EXCHANGE  |PARTITIONED|
-        aggregate [$$63] <- [agg-sql-sum($$67)] [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
+        aggregate [$$63] <- [agg-sql-sum($$67)] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
         -- AGGREGATE  |UNPARTITIONED|
-          exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 10.0]
+          exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
           -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
-            aggregate [$$67] <- [agg-sql-count(1)] [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
+            aggregate [$$67] <- [agg-sql-count(1)] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
             -- AGGREGATE  |PARTITIONED|
-              select (and(ge($$60, "1"), le($$60, "2"))) [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
+              select (and(ge($$60, "1"), le($$60, "2"))) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
               -- STREAM_SELECT  |PARTITIONED|
-                project ([$$60]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                project ([$$60]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                 -- STREAM_PROJECT  |PARTITIONED|
-                  assign [$$60] <- [$$ao.getField("text")] [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                  assign [$$60] <- [$$ao.getField("text")] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                   -- ASSIGN  |PARTITIONED|
-                    project ([$$ao]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                    project ([$$ao]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                     -- STREAM_PROJECT  |PARTITIONED|
-                      unnest $$ao <- scan-collection($$65) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                      unnest $$ao <- scan-collection($$65) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                       -- UNNEST  |PARTITIONED|
-                        project ([$$65]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                        project ([$$65]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                         -- STREAM_PROJECT  |PARTITIONED|
-                          assign [$$65] <- [$$p2.getField("arrayOrObject")] [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                          assign [$$65] <- [$$p2.getField("arrayOrObject")] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                           -- ASSIGN  |PARTITIONED|
-                            exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+                            exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                              replicate [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                              replicate [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                               -- REPLICATE  |PARTITIONED|
-                                exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+                                exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
                                 -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                  project ([$$p2]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+                                  project ([$$p2]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
                                   -- STREAM_PROJECT  |PARTITIONED|
-                                    exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+                                    exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
                                     -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                      data-scan []<-[$$62, $$p2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) range-filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) [cardinality: 6.0, op-cost: 6.0, total-cost: 6.0]
+                                      data-scan []<-[$$62, $$p2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) range-filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) [cardinality: 1.0, op-cost: 1.0, total-cost: 1.0]
                                       -- DATASOURCE_SCAN  |PARTITIONED|
                                         exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
                                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.03.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.03.adm
new file mode 100644
index 0000000..07e109a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.03.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 6, "name": "fsdxv😀", "age": 17 } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.04.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.04.plan
new file mode 100644
index 0000000..131d7f2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.04.plan
@@ -0,0 +1,24 @@
+distribute result [$$19] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "fsdxv😀"), lt($$20, "fsdxv😁"))) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan []<-[$$21, $$ds1] <- test.ds1 [cardinality: 10.0, op-cost: 10.0, total-cost: 10.0]
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.05.adm
new file mode 100644
index 0000000..45d43d5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.05.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 4, "name": "asdgdh👩‍👩‍👧dfsd", "age": 23 } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.06.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.06.plan
new file mode 100644
index 0000000..16cbb40
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.06.plan
@@ -0,0 +1,24 @@
+distribute result [$$19] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "asdgdh👩‍👩‍👧"), lt($$20, "asdgdh👩‍👩‍👨"))) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan []<-[$$21, $$ds1] <- test.ds1 [cardinality: 10.0, op-cost: 10.0, total-cost: 10.0]
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.07.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.07.adm
new file mode 100644
index 0000000..e4ac10b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.07.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 5, "name": "sxvciis", "age": 13 } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.08.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.08.plan
new file mode 100644
index 0000000..bb00128
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.08.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+      -- ASSIGN  |PARTITIONED|
+        select (eq($$ds1.getField(1), "sxvciis")) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 [cardinality: 10.0, op-cost: 10.0, total-cost: 10.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.09.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.09.adm
new file mode 100644
index 0000000..4ca49aa
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.09.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 1, "name": "John", "age": 21 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.10.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.10.plan
new file mode 100644
index 0000000..4f7d8c3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.10.plan
@@ -0,0 +1,24 @@
+distribute result [$$19] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "Joh"), lt($$20, "Joi"))) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan []<-[$$21, $$ds1] <- test.ds1 [cardinality: 10.0, op-cost: 10.0, total-cost: 10.0]
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.11.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.11.adm
new file mode 100644
index 0000000..202bfb1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.11.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 9, "name": "sxvfjshfjisciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.12.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.12.plan
new file mode 100644
index 0000000..d31cb55
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.12.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+      -- ASSIGN  |PARTITIONED|
+        select (like($$ds1.getField(1), "s%vfjs__jiscii%")) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 [cardinality: 10.0, op-cost: 10.0, total-cost: 10.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.13.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.13.adm
new file mode 100644
index 0000000..6bbea7c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.13.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 5, "name": "sxvciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.14.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.14.plan
new file mode 100644
index 0000000..af9784e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.14.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+      -- ASSIGN  |PARTITIONED|
+        select (like($$ds1.getField(1), "sxvc_is")) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 [cardinality: 10.0, op-cost: 10.0, total-cost: 10.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.15.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.15.adm
new file mode 100644
index 0000000..6bbea7c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.15.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 5, "name": "sxvciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.16.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.16.plan
new file mode 100644
index 0000000..5f1ee43
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.16.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+      -- ASSIGN  |PARTITIONED|
+        select (like($$ds1.getField(1), "sxvc_i%")) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 [cardinality: 10.0, op-cost: 10.0, total-cost: 10.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.17.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.17.adm
new file mode 100644
index 0000000..202bfb1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.17.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 9, "name": "sxvfjshfjisciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.18.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.18.plan
new file mode 100644
index 0000000..bd31c2e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.18.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+      -- ASSIGN  |PARTITIONED|
+        select (like($$ds1.getField(1), "sxvfjs%jisci%")) [cardinality: 1.0, op-cost: 0.0, total-cost: 10.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 [cardinality: 10.0, op-cost: 10.0, total-cost: 10.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.19.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.19.adm
new file mode 100644
index 0000000..3d747c3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.19.adm
@@ -0,0 +1,2 @@
+{ "ds1": { "id": 1, "name": "John", "age": 21 } }
+{ "ds1": { "id": 8, "name": "Join", "age": 21 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.20.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.20.plan
new file mode 100644
index 0000000..b21c4ee
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.20.plan
@@ -0,0 +1,24 @@
+distribute result [$$19] [cardinality: 2.0, op-cost: 0.0, total-cost: 10.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 2.0, op-cost: 0.0, total-cost: 10.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 2.0, op-cost: 0.0, total-cost: 10.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 2.0, op-cost: 0.0, total-cost: 10.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 2.0, op-cost: 0.0, total-cost: 10.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "J"), lt($$20, "K"))) [cardinality: 2.0, op-cost: 0.0, total-cost: 10.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan []<-[$$21, $$ds1] <- test.ds1 [cardinality: 10.0, op-cost: 10.0, total-cost: 10.0]
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.21.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.21.adm
new file mode 100644
index 0000000..be3fef3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.21.adm
@@ -0,0 +1,2 @@
+{ "ds1": { "id": 9, "name": "sxvfjshfjisciis", "age": 13 } }
+{ "ds1": { "id": 5, "name": "sxvciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.22.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.22.plan
new file mode 100644
index 0000000..9922a01
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/index-selection/like-expression/like-expression.22.plan
@@ -0,0 +1,24 @@
+distribute result [$$19] [cardinality: 2.0, op-cost: 0.0, total-cost: 10.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 2.0, op-cost: 0.0, total-cost: 10.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 2.0, op-cost: 0.0, total-cost: 10.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 2.0, op-cost: 0.0, total-cost: 10.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 2.0, op-cost: 0.0, total-cost: 10.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "sx"), lt($$20, "sy"))) [cardinality: 2.0, op-cost: 0.0, total-cost: 10.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 10.0, op-cost: 0.0, total-cost: 10.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan []<-[$$21, $$ds1] <- test.ds1 [cardinality: 10.0, op-cost: 10.0, total-cost: 10.0]
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/ddl/create_dataset_with_filter_on_meta/create_dataset_with_filter_on_meta.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/ddl/create_dataset_with_filter_on_meta/create_dataset_with_filter_on_meta.1.adm
index e460060..1f0dd69 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/ddl/create_dataset_with_filter_on_meta/create_dataset_with_filter_on_meta.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/ddl/create_dataset_with_filter_on_meta/create_dataset_with_filter_on_meta.1.adm
@@ -1,3 +1,3 @@
-{ "DatabaseName": "Default", "DataverseName": "KeyVerse", "DatasetName": "KVStore", "IndexName": "KVStore", "IndexStructure": "BTREE", "SearchKey": [ [ "key" ] ], "IsPrimary": true, "Timestamp": "Sat Sep 30 23:12:54 PDT 2023", "PendingOp": 0, "SearchKeySourceIndicator": [ 1 ] }
-{ "DatabaseName": "Default", "DataverseName": "KeyVerse", "DatasetName": "KVStore", "IndexName": "s_location", "IndexStructure": "RTREE", "SearchKey": [ [ "location" ] ], "IsPrimary": false, "Timestamp": "Sat Sep 30 23:12:54 PDT 2023", "PendingOp": 0 }
-{ "DatabaseName": "Default", "DataverseName": "KeyVerse", "DatasetName": "KVStore", "IndexName": "s_rating", "IndexStructure": "BTREE", "SearchKey": [ [ "area_code" ] ], "IsPrimary": false, "Timestamp": "Sat Sep 30 23:12:54 PDT 2023", "PendingOp": 0, "ExcludeUnknownKey": false }
\ No newline at end of file
+{ "DatabaseName": "Default", "DataverseName": "KeyVerse", "DatasetName": "KVStore", "IndexName": "KVStore", "IndexStructure": "BTREE", "SearchKey": [ [ "key" ] ], "IsPrimary": true, "Timestamp": "Mon Oct 14 07:37:28 UTC 2024", "PendingOp": 0, "SearchKeySourceIndicator": [ 1 ], "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } }
+{ "DatabaseName": "Default", "DataverseName": "KeyVerse", "DatasetName": "KVStore", "IndexName": "s_location", "IndexStructure": "RTREE", "SearchKey": [ [ "location" ] ], "IsPrimary": false, "Timestamp": "Sat Sep 30 23:12:54 PDT 2023", "PendingOp": 0, "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } }
+{ "DatabaseName": "Default", "DataverseName": "KeyVerse", "DatasetName": "KVStore", "IndexName": "s_rating", "IndexStructure": "BTREE", "SearchKey": [ [ "area_code" ] ], "IsPrimary": false, "Timestamp": "Sat Sep 30 23:12:54 PDT 2023", "PendingOp": 0, "ExcludeUnknownKey": false, "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/ddl/index-cast-null/index-cast-null.015.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/ddl/index-cast-null/index-cast-null.015.adm
index 52eaa75..797b6a6 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/ddl/index-cast-null/index-cast-null.015.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/ddl/index-cast-null/index-cast-null.015.adm
@@ -1 +1 @@
-{ "DatabaseName": "Default", "DataverseName": "test", "DatasetName": "ds2", "IndexName": "ds2_o_idx2", "IndexStructure": "BTREE", "SearchKey": [ [ "o_s_f" ], [ "o_d_f" ] ], "IsPrimary": false, "Timestamp": "Sat Sep 30 23:18:19 PDT 2023", "PendingOp": 0, "SearchKeyType": [ "double", "string" ], "ExcludeUnknownKey": false, "Cast": { "Default": null } }
\ No newline at end of file
+{ "DatabaseName": "Default", "DataverseName": "test", "DatasetName": "ds2", "IndexName": "ds2_o_idx2", "IndexStructure": "BTREE", "SearchKey": [ [ "o_s_f" ], [ "o_d_f" ] ], "IsPrimary": false, "Timestamp": "Mon Oct 14 07:37:29 UTC 2024", "PendingOp": 0, "SearchKeyType": [ "double", "string" ], "ExcludeUnknownKey": false, "Cast": { "Default": null }, "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/fulltext/stopwords-full-text-filter-1/stopwords-full-text-filter-1.7.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/fulltext/stopwords-full-text-filter-1/stopwords-full-text-filter-1.7.adm
index 0003857..73c6eb5 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/fulltext/stopwords-full-text-filter-1/stopwords-full-text-filter-1.7.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/fulltext/stopwords-full-text-filter-1/stopwords-full-text-filter-1.7.adm
@@ -1,4 +1,4 @@
-{ "DatabaseName": "Default", "DataverseName": "MyDataVerse", "DatasetName": "MyMessageDataset", "IndexName": "MyMessageDataset", "IndexStructure": "BTREE", "SearchKey": [ [ "myMessageId" ] ], "IsPrimary": true, "Timestamp": "Sat Sep 30 23:33:32 PDT 2023", "PendingOp": 0 }
-{ "DatabaseName": "Default", "DataverseName": "MyDataVerse", "DatasetName": "MyMessageDataset", "IndexName": "message_ft_index_0", "IndexStructure": "SINGLE_PARTITION_WORD_INVIX", "SearchKey": [ [ "myMessageBody" ] ], "IsPrimary": false, "Timestamp": "Sat Sep 30 23:33:33 PDT 2023", "PendingOp": 0 }
-{ "DatabaseName": "Default", "DataverseName": "MyDataVerse", "DatasetName": "MyMessageDataset", "IndexName": "message_ft_index_1", "IndexStructure": "SINGLE_PARTITION_WORD_INVIX", "SearchKey": [ [ "myMessageBody" ] ], "IsPrimary": false, "Timestamp": "Sat Sep 30 23:33:34 PDT 2023", "PendingOp": 0, "FullTextConfig": "my_first_stopword_config" }
-{ "DatabaseName": "Default", "DataverseName": "MyDataVerse", "DatasetName": "MyMessageDataset", "IndexName": "message_ft_index_2", "IndexStructure": "SINGLE_PARTITION_WORD_INVIX", "SearchKey": [ [ "myMessageBody" ] ], "IsPrimary": false, "Timestamp": "Sat Sep 30 23:33:34 PDT 2023", "PendingOp": 0, "FullTextConfig": "my_second_stopword_config" }
\ No newline at end of file
+{ "DatabaseName": "Default", "DataverseName": "MyDataVerse", "DatasetName": "MyMessageDataset", "IndexName": "MyMessageDataset", "IndexStructure": "BTREE", "SearchKey": [ [ "myMessageId" ] ], "IsPrimary": true, "Timestamp": "Wed Oct 09 14:02:40 IST 2024", "PendingOp": 0, "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } }
+{ "DatabaseName": "Default", "DataverseName": "MyDataVerse", "DatasetName": "MyMessageDataset", "IndexName": "message_ft_index_0", "IndexStructure": "SINGLE_PARTITION_WORD_INVIX", "SearchKey": [ [ "myMessageBody" ] ], "IsPrimary": false, "Timestamp": "Wed Oct 09 14:04:53 IST 2024", "PendingOp": 0, "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } }
+{ "DatabaseName": "Default", "DataverseName": "MyDataVerse", "DatasetName": "MyMessageDataset", "IndexName": "message_ft_index_1", "IndexStructure": "SINGLE_PARTITION_WORD_INVIX", "SearchKey": [ [ "myMessageBody" ] ], "IsPrimary": false, "Timestamp": "Wed Oct 09 14:06:04 IST 2024", "PendingOp": 0, "FullTextConfig": "my_first_stopword_config", "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } }
+{ "DatabaseName": "Default", "DataverseName": "MyDataVerse", "DatasetName": "MyMessageDataset", "IndexName": "message_ft_index_2", "IndexStructure": "SINGLE_PARTITION_WORD_INVIX", "SearchKey": [ [ "myMessageBody" ] ], "IsPrimary": false, "Timestamp": "Wed Oct 09 14:08:21 IST 2024", "PendingOp": 0, "FullTextConfig": "my_second_stopword_config", "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/synonym/synonym-01/synonym-01.9.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/synonym/synonym-01/synonym-01.9.adm
index 6427b88..886eef8 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/synonym/synonym-01/synonym-01.9.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/synonym/synonym-01/synonym-01.9.adm
@@ -1,4 +1,4 @@
-{ "DatabaseName": "Default", "DataverseName": "test", "SynonymName": "TweetMessagesSynonym", "ObjectDataverseName": "test", "ObjectName": "TweetMessages", "ObjectDatabaseName": "Default" }
-{ "DatabaseName": "Default", "DataverseName": "test", "SynonymName": "TweetMessagesSynonym2", "ObjectDataverseName": "test", "ObjectName": "TweetMessagesSynonym", "ObjectDatabaseName": "Default" }
-{ "DatabaseName": "Default", "DataverseName": "test", "SynonymName": "TwitterUsersSynonym", "ObjectDataverseName": "test", "ObjectName": "TwitterUsers", "ObjectDatabaseName": "Default" }
-{ "DatabaseName": "Default", "DataverseName": "test", "SynonymName": "TwitterUsersSynonym2", "ObjectDataverseName": "test", "ObjectName": "TwitterUsersSynonym", "ObjectDatabaseName": "Default" }
+{ "DatabaseName": "Default", "DataverseName": "test", "SynonymName": "TweetMessagesSynonym", "ObjectDataverseName": "test", "ObjectName": "TweetMessages", "ObjectDatabaseName": "Default", "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } }
+{ "DatabaseName": "Default", "DataverseName": "test", "SynonymName": "TweetMessagesSynonym2", "ObjectDataverseName": "test", "ObjectName": "TweetMessagesSynonym", "ObjectDatabaseName": "Default", "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } }
+{ "DatabaseName": "Default", "DataverseName": "test", "SynonymName": "TwitterUsersSynonym", "ObjectDataverseName": "test", "ObjectName": "TwitterUsers", "ObjectDatabaseName": "Default", "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } }
+{ "DatabaseName": "Default", "DataverseName": "test", "SynonymName": "TwitterUsersSynonym2", "ObjectDataverseName": "test", "ObjectName": "TwitterUsersSynonym", "ObjectDatabaseName": "Default", "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/user-defined-functions/udf32_metadata/udf32_metadata.2.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/user-defined-functions/udf32_metadata/udf32_metadata.2.adm
index a1c751b..7556190 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/user-defined-functions/udf32_metadata/udf32_metadata.2.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cloud/user-defined-functions/udf32_metadata/udf32_metadata.2.adm
@@ -1,8 +1,8 @@
-{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn001", "Arity": "0", "Params": [  ], "ReturnType": "", "Definition": "42", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ] } }
-{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn002", "Arity": "1", "Params": [ "a" ], "ReturnType": "", "Definition": "a", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ] } }
-{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn003", "Arity": "2", "Params": [ "a", "b" ], "ReturnType": "", "Definition": "a + b", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ] } }
-{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn004", "Arity": "-1", "Params": [ "args" ], "ReturnType": "", "Definition": "args", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ] } }
-{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn005", "Arity": "1", "Params": [ "a" ], "ReturnType": "", "Definition": "a", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ] } }
-{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn005", "Arity": "2", "Params": [ "a", "b" ], "ReturnType": "", "Definition": "a + b", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ] } }
-{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn006", "Arity": "-1", "Params": [ "args" ], "ReturnType": "", "Definition": "args", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ] } }
-{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn006", "Arity": "1", "Params": [ "a" ], "ReturnType": "", "Definition": "a", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ] } }
\ No newline at end of file
+{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn001", "Arity": "0", "Params": [  ], "ReturnType": "", "Definition": "42", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ], "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } } }
+{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn002", "Arity": "1", "Params": [ "a" ], "ReturnType": "", "Definition": "a", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ], "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } } }
+{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn003", "Arity": "2", "Params": [ "a", "b" ], "ReturnType": "", "Definition": "a + b", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ], "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } } }
+{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn004", "Arity": "-1", "Params": [ "args" ], "ReturnType": "", "Definition": "args", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ], "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } } }
+{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn005", "Arity": "1", "Params": [ "a" ], "ReturnType": "", "Definition": "a", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ], "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } } }
+{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn005", "Arity": "2", "Params": [ "a", "b" ], "ReturnType": "", "Definition": "a + b", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ], "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } } }
+{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn006", "Arity": "-1", "Params": [ "args" ], "ReturnType": "", "Definition": "args", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ], "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } } }
+{ "fn": { "DatabaseName": "Default", "DataverseName": "test", "Name": "myfn006", "Arity": "1", "Params": [ "a" ], "ReturnType": "", "Definition": "a", "Language": "SQLPP", "Kind": "SCALAR", "Dependencies": [ [  ], [  ], [  ] ], "Creator": { "Name": "@sys", "Uuid": "97c793f3-bcbf-4595-8bf0-e9d6a5953523" } } }
\ No newline at end of file
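
The rebased result fixtures above all gain a "Creator" object ("Name" plus "Uuid") on index, synonym, and function metadata records. As a hedged illustration of how the new field surfaces, the SQL++ sketch below queries the Metadata.`Function` dataset directly; only the Creator shape is taken from these fixtures, the query text itself is assumed:

    -- sketch: surface the new Creator field on function metadata
    -- (only the field shape comes from the fixtures above; the query is assumed)
    SELECT f.Name, f.Creator.Name AS creatorName, f.Creator.Uuid AS creatorUuid
    FROM Metadata.`Function` AS f;
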
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.03.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.03.adm
new file mode 100644
index 0000000..07e109a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.03.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 6, "name": "fsdxv😀", "age": 17 } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.04.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.04.plan
new file mode 100644
index 0000000..24d5e9e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.04.plan
@@ -0,0 +1,38 @@
+distribute result [$$19] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "fsdxv😀"), lt($$20, "fsdxv😁"))) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  unnest-map [$$21, $$ds1] <- index-search("ds1", 0, "Default", "test", "ds1", false, false, 1, $$25, 1, $$25, true, true, true) range-filter on: and(ge($$ds1.getField(1), "fsdxv😀"), lt($$ds1.getField(1), "fsdxv😁")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- BTREE_SEARCH  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      order (ASC, $$25) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- STABLE_SORT [$$25(ASC)]  |PARTITIONED|
+                        exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$25]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              unnest-map [$$24, $$25] <- index-search("ds1_name", 0, "Default", "test", "ds1", false, false, 1, $$22, 1, $$23, true, false, false) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                              -- BTREE_SEARCH  |PARTITIONED|
+                                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  assign [$$22, $$23] <- ["fsdxv😀", "fsdxv😁"] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                  -- ASSIGN  |PARTITIONED|
+                                    empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
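
The plan fixture above encodes the expected access path for a prefix LIKE over the column-storage dataset: the pattern's literal prefix becomes a closed-open range on the secondary index ds1_name, with the upper bound formed by incrementing the prefix's final code point (here 😀 U+1F600 becomes 😁 U+1F601), followed by a sorted primary-index lookup. A hypothetical query of the shape this plan corresponds to, reusing the dataset and index names from the fixture:

    -- hypothetical query; dataset/index names are taken from the plan fixture above
    USE test;
    SELECT ds1 FROM ds1 WHERE ds1.name LIKE "fsdxv😀%";
    -- expected rewrite per the plan: ds1.name >= "fsdxv😀" AND ds1.name < "fsdxv😁",
    -- answered by an index-search on ds1_name plus a primary-index lookup
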
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.05.adm
new file mode 100644
index 0000000..45d43d5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.05.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 4, "name": "asdgdh👩‍👩‍👧dfsd", "age": 23 } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.06.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.06.plan
new file mode 100644
index 0000000..965a847
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.06.plan
@@ -0,0 +1,38 @@
+distribute result [$$19] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "asdgdh👩‍👩‍👧"), lt($$20, "asdgdh👩‍👩‍👨"))) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  unnest-map [$$21, $$ds1] <- index-search("ds1", 0, "Default", "test", "ds1", false, false, 1, $$25, 1, $$25, true, true, true) range-filter on: and(ge($$ds1.getField(1), "asdgdh👩‍👩‍👧"), lt($$ds1.getField(1), "asdgdh👩‍👩‍👨")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- BTREE_SEARCH  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      order (ASC, $$25) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- STABLE_SORT [$$25(ASC)]  |PARTITIONED|
+                        exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$25]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              unnest-map [$$24, $$25] <- index-search("ds1_name", 0, "Default", "test", "ds1", false, false, 1, $$22, 1, $$23, true, false, false) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                              -- BTREE_SEARCH  |PARTITIONED|
+                                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  assign [$$22, $$23] <- ["asdgdh👩‍👩‍👧", "asdgdh👩‍👩‍👨"] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                  -- ASSIGN  |PARTITIONED|
+                                    empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.07.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.07.adm
new file mode 100644
index 0000000..e4ac10b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.07.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 5, "name": "sxvciis", "age": 13 } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.08.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.08.plan
new file mode 100644
index 0000000..33927d1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.08.plan
@@ -0,0 +1,34 @@
+distribute result [$$16] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        select (eq($$ds1.getField(1), "sxvciis")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              unnest-map [$$17, $$ds1] <- index-search("ds1", 0, "Default", "test", "ds1", false, false, 1, $$22, 1, $$22, true, true, true) range-filter on: eq($$ds1.getField(1), "sxvciis") [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- BTREE_SEARCH  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  order (ASC, $$22) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- STABLE_SORT [$$22(ASC)]  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      project ([$$22]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- STREAM_PROJECT  |PARTITIONED|
+                        exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          unnest-map [$$21, $$22] <- index-search("ds1_name", 0, "Default", "test", "ds1", false, false, 1, $$19, 1, $$20, true, true, true) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                          -- BTREE_SEARCH  |PARTITIONED|
+                            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              assign [$$19, $$20] <- ["sxvciis", "sxvciis"] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                              -- ASSIGN  |PARTITIONED|
+                                empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.09.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.09.adm
new file mode 100644
index 0000000..4ca49aa
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.09.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 1, "name": "John", "age": 21 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.10.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.10.plan
new file mode 100644
index 0000000..975da5f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.10.plan
@@ -0,0 +1,38 @@
+distribute result [$$19] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "Joh"), lt($$20, "Joi"))) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  unnest-map [$$21, $$ds1] <- index-search("ds1", 0, "Default", "test", "ds1", false, false, 1, $$25, 1, $$25, true, true, true) range-filter on: and(ge($$ds1.getField(1), "Joh"), lt($$ds1.getField(1), "Joi")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- BTREE_SEARCH  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      order (ASC, $$25) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- STABLE_SORT [$$25(ASC)]  |PARTITIONED|
+                        exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$25]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              unnest-map [$$24, $$25] <- index-search("ds1_name", 0, "Default", "test", "ds1", false, false, 1, $$22, 1, $$23, true, false, false) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                              -- BTREE_SEARCH  |PARTITIONED|
+                                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  assign [$$22, $$23] <- ["Joh", "Joi"] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                  -- ASSIGN  |PARTITIONED|
+                                    empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.11.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.11.adm
new file mode 100644
index 0000000..202bfb1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.11.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 9, "name": "sxvfjshfjisciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.12.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.12.plan
new file mode 100644
index 0000000..78ba609
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.12.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        select (like($$ds1.getField(1), "s%vfjs__jiscii%")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 filter on: like($$ds1.getField(1), "s%vfjs__jiscii%") [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
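
Plans 12 through 18, by contrast, cover patterns the range rewrite does not accelerate: with `_` wildcards or an interior `%` in play, the fixtures expect the like() call to survive and the plan to fall back to a full data-scan with the predicate pushed into the scan as a filter. A hypothetical query of that shape, reusing the pattern from the plan above:

    -- hypothetical query; the pattern is copied from the plan fixture above
    USE test;
    SELECT ds1 FROM ds1 WHERE ds1.name LIKE "s%vfjs__jiscii%";
    -- the fixture expects no index range here: like() is kept and evaluated
    -- as a pushed-down filter on the test.ds1 data-scan
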
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.13.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.13.adm
new file mode 100644
index 0000000..6bbea7c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.13.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 5, "name": "sxvciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.14.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.14.plan
new file mode 100644
index 0000000..0d3a72c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.14.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        select (like($$ds1.getField(1), "sxvc_is")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 filter on: like($$ds1.getField(1), "sxvc_is") [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.15.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.15.adm
new file mode 100644
index 0000000..6bbea7c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.15.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 5, "name": "sxvciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.16.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.16.plan
new file mode 100644
index 0000000..fd8fbf2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.16.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        select (like($$ds1.getField(1), "sxvc_i%")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 filter on: like($$ds1.getField(1), "sxvc_i%") [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.17.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.17.adm
new file mode 100644
index 0000000..202bfb1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.17.adm
@@ -0,0 +1 @@
+{ "ds1": { "id": 9, "name": "sxvfjshfjisciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.18.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.18.plan
new file mode 100644
index 0000000..f3df0b6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.18.plan
@@ -0,0 +1,20 @@
+distribute result [$$16] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$16]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$16] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        select (like($$ds1.getField(1), "sxvfjs%jisci%")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_SELECT  |PARTITIONED|
+          project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan []<-[$$17, $$ds1] <- test.ds1 filter on: like($$ds1.getField(1), "sxvfjs%jisci%") [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.19.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.19.adm
new file mode 100644
index 0000000..3d747c3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.19.adm
@@ -0,0 +1,2 @@
+{ "ds1": { "id": 1, "name": "John", "age": 21 } }
+{ "ds1": { "id": 8, "name": "Join", "age": 21 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.20.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.20.plan
new file mode 100644
index 0000000..ec023bd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.20.plan
@@ -0,0 +1,38 @@
+distribute result [$$19] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "J"), lt($$20, "K"))) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  unnest-map [$$21, $$ds1] <- index-search("ds1", 0, "Default", "test", "ds1", false, false, 1, $$25, 1, $$25, true, true, true) range-filter on: and(ge($$ds1.getField(1), "J"), lt($$ds1.getField(1), "K")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- BTREE_SEARCH  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      order (ASC, $$25) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- STABLE_SORT [$$25(ASC)]  |PARTITIONED|
+                        exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$25]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              unnest-map [$$24, $$25] <- index-search("ds1_name", 0, "Default", "test", "ds1", false, false, 1, $$22, 1, $$23, true, false, false) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                              -- BTREE_SEARCH  |PARTITIONED|
+                                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  assign [$$22, $$23] <- ["J", "K"] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                  -- ASSIGN  |PARTITIONED|
+                                    empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.21.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.21.adm
new file mode 100644
index 0000000..be3fef3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.21.adm
@@ -0,0 +1,2 @@
+{ "ds1": { "id": 9, "name": "sxvfjshfjisciis", "age": 13 } }
+{ "ds1": { "id": 5, "name": "sxvciis", "age": 13 } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.22.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.22.plan
new file mode 100644
index 0000000..746beb7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_column/index-selection/like-expression/like-expression.22.plan
@@ -0,0 +1,38 @@
+distribute result [$$19] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    project ([$$19]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+    -- STREAM_PROJECT  |PARTITIONED|
+      assign [$$19] <- [{"ds1": $$ds1}] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+      -- ASSIGN  |PARTITIONED|
+        project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+        -- STREAM_PROJECT  |PARTITIONED|
+          select (and(ge($$20, "sx"), lt($$20, "sy"))) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+          -- STREAM_SELECT  |PARTITIONED|
+            assign [$$20] <- [$$ds1.getField(1)] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+            -- ASSIGN  |PARTITIONED|
+              project ([$$ds1]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  unnest-map [$$21, $$ds1] <- index-search("ds1", 0, "Default", "test", "ds1", false, false, 1, $$25, 1, $$25, true, true, true) range-filter on: and(ge($$ds1.getField(1), "sx"), lt($$ds1.getField(1), "sy")) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                  -- BTREE_SEARCH  |PARTITIONED|
+                    exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      order (ASC, $$25) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                      -- STABLE_SORT [$$25(ASC)]  |PARTITIONED|
+                        exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$25]) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              unnest-map [$$24, $$25] <- index-search("ds1_name", 0, "Default", "test", "ds1", false, false, 1, $$22, 1, $$23, true, false, false) [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                              -- BTREE_SEARCH  |PARTITIONED|
+                                exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  assign [$$22, $$23] <- ["sx", "sy"] [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                  -- ASSIGN  |PARTITIONED|
+                                    empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
+                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/string/like_01/like_01.3.ast b/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/string/like_01/like_01.3.ast
index d5c1517..c9e6664 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/string/like_01/like_01.3.ast
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/string/like_01/like_01.3.ast
@@ -10,13 +10,7 @@
     LiteralExpr [STRING] [A8BBB]
     LiteralExpr [STRING] [_6%]
   ]
-  FunctionCall asterix.like@2[
-    LiteralExpr [STRING] [+0300]
-    LiteralExpr [STRING] [+03%]
-  ]
-  FunctionCall asterix.like@2[
-    LiteralExpr [STRING] [?0300]
-    LiteralExpr [STRING] [?03%]
-  ]
+  LiteralExpr [STRING] [+0300]
+  LiteralExpr [STRING] [?0300]
 ]
 ]
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/sqlpp_queries.xml b/asterixdb/asterix-app/src/test/resources/runtimets/sqlpp_queries.xml
index 422d913..71ba960 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/sqlpp_queries.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/sqlpp_queries.xml
@@ -631,6 +631,36 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="aggregate">
+      <compilation-unit name="countn_01">
+        <output-dir compare="Text">countn_01</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="countn_02">
+        <output-dir compare="Text">countn_02</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="countn_empty_01">
+        <output-dir compare="Text">countn_empty_01</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="countn_empty_02">
+        <output-dir compare="Text">countn_empty_02</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="countn_null">
+        <output-dir compare="Text">countn_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="countn_distinct">
+        <output-dir compare="Text">countn_distinct</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
       <compilation-unit name="kurtosis_double">
         <output-dir compare="Text">kurtosis_double</output-dir>
       </compilation-unit>
@@ -766,6 +796,21 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="aggregate">
+      <compilation-unit name="scalar_countn">
+        <output-dir compare="Text">scalar_countn</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="scalar_countn_empty">
+        <output-dir compare="Text">scalar_countn_empty</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="scalar_countn_null">
+        <output-dir compare="Text">scalar_countn_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
       <compilation-unit name="scalar_kurtosis">
         <output-dir compare="Text">scalar_kurtosis</output-dir>
       </compilation-unit>
@@ -1909,6 +1954,36 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="aggregate-sql">
+      <compilation-unit name="countn_01">
+        <output-dir compare="Text">countn_01</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="countn_02">
+        <output-dir compare="Text">countn_02</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="countn_empty_01">
+        <output-dir compare="Text">countn_empty_01</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="countn_empty_02">
+        <output-dir compare="Text">countn_empty_02</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="countn_null">
+        <output-dir compare="Text">countn_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="countn_distinct">
+        <output-dir compare="Text">countn_distinct</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
       <compilation-unit name="kurtosis_double">
         <output-dir compare="Text">kurtosis_double</output-dir>
       </compilation-unit>
@@ -2044,6 +2119,21 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="aggregate-sql">
+      <compilation-unit name="scalar_countn">
+        <output-dir compare="Text">scalar_countn</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="scalar_countn_empty">
+        <output-dir compare="Text">scalar_countn_empty</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="scalar_countn_null">
+        <output-dir compare="Text">scalar_countn_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
       <compilation-unit name="scalar_kurtosis">
         <output-dir compare="Text">scalar_kurtosis</output-dir>
       </compilation-unit>
@@ -3046,6 +3136,11 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="array_fun">
+      <compilation-unit name="array_slice/array_slice_bracket_notation">
+        <output-dir compare="Text">array_slice/array_slice_bracket_notation</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="array_fun">
       <compilation-unit name="array_slice/array_slice_double_argument">
         <output-dir compare="Text">array_slice/array_slice_double_argument</output-dir>
       </compilation-unit>
@@ -6587,6 +6682,11 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="index-selection">
+      <compilation-unit name="like-expression">
+        <output-dir compare="Text">like-expression</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="index-selection">
       <compilation-unit name="verify">
         <output-dir compare="Text">verify</output-dir>
       </compilation-unit>
@@ -7436,6 +7536,11 @@
         <output-dir compare="Text">remove_listify</output-dir>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="misc">
+      <compilation-unit name="query-ASTERIXDB-3410">
+        <output-dir compare="Text">query-ASTERIXDB-3410</output-dir>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="multipart-dataverse">
     <test-case FilePath="multipart-dataverse">
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_cloud_storage.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_cloud_storage.xml
index 6a75394..03fa519 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_cloud_storage.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_cloud_storage.xml
@@ -44,5 +44,65 @@
       </compilation-unit>
     </test-case>
   </test-group>
+  <test-group name="storage-size">
+    <test-case FilePath="cloud_storage/storage-size/datasource-function">
+      <compilation-unit name="collection-exists">
+        <output-dir compare="Clean-JSON">collection-exists</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="cloud_storage/storage-size/datasource-function">
+      <compilation-unit name="non-constant-argument">
+        <output-dir compare="Clean-JSON">non-constant-argument</output-dir>
+        <expected-error>Compilation error: Function asterix.storage-size expects constant arguments while arg[0] is of type VARIABLE</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="cloud_storage/storage-size/datasource-function">
+      <compilation-unit name="collection-does-not-exist">
+        <output-dir compare="Clean-JSON">collection-does-not-exist</output-dir>
+        <expected-error>Cannot find dataset with name doesNotExistCollection in dataverse testDatabase.testScope</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="cloud_storage/storage-size/datasource-function">
+      <compilation-unit name="non-string-argument">
+        <output-dir compare="Clean-JSON">non-string-argument</output-dir>
+        <expected-error>Type mismatch: function storage-size expects its 1st input parameter to be of type string, but the actual input type is bigint</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="cloud_storage/storage-size/datasource-function">
+      <compilation-unit name="view-not-collection">
+        <output-dir compare="Clean-JSON">view-not-collection</output-dir>
+        <expected-error>Cannot find dataset with name testCollection_vw in dataverse testDatabase.testScope</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="cloud_storage/storage-size/datasource-function">
+      <compilation-unit name="external-collection">
+        <output-dir compare="Clean-JSON">external-collection</output-dir>
+        <expected-error>Retrieving storage size is not applicable to type: EXTERNAL.</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="cloud_storage/storage-size/datasource-function">
+      <compilation-unit name="index-exists">
+        <output-dir compare="Clean-JSON">index-exists</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="cloud_storage/storage-size/datasource-function">
+      <compilation-unit name="index-does-not-exist">
+        <output-dir compare="Clean-JSON">index-does-not-exist</output-dir>
+        <expected-error>Cannot find index with name testCollection_idx_fake</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="cloud_storage/storage-size/datasource-function">
+      <compilation-unit name="invalid-arguments-count">
+        <output-dir compare="Clean-JSON">invalid-arguments-count</output-dir>
+        <expected-error>Invalid number of arguments for function storage-size</expected-error>
+        <expected-error>Invalid number of arguments for function storage-size</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="cloud_storage/storage-size/datasource-function">
+      <compilation-unit name="sanity">
+        <output-dir compare="Clean-JSON">sanity</output-dir>
+      </compilation-unit>
+    </test-case>
+  </test-group>
   &sqlpp_queries;
 </test-suite>
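
The storage-size group added above probes a datasource function for cloud storage. The expected errors pin down its contract: constant string arguments naming a collection (with an optional index argument), strict argument-count checking, and rejection of views and EXTERNAL collections. A hypothetical invocation consistent with those messages; the argument order and count are assumptions:

    -- hypothetical; the signature is reconstructed from the expected-error fixtures above
    FROM `storage-size`("testScope", "testCollection") AS s
    SELECT VALUE s;
    -- assumed index form: `storage-size`("testScope", "testCollection", "testCollection_idx")
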
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset_s3.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset_s3.xml
index 2c7af49..ee9dc3b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset_s3.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset_s3.xml
@@ -40,6 +40,41 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="copy-to">
+      <compilation-unit name="parquet-simple">
+        <output-dir compare="Text">parquet-simple</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to">
+      <compilation-unit name="parquet-tweet">
+        <output-dir compare="Text">parquet-tweet</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to">
+      <compilation-unit name="parquet-partition-heterogeneous">
+        <output-dir compare="Text">parquet-partition-heterogeneous</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to">
+      <compilation-unit name="parquet-utf8">
+        <output-dir compare="Text">parquet-utf8</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to">
+      <compilation-unit name="parquet-heterogeneous">
+        <output-dir compare="Text">parquet-heterogeneous</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to">
+      <compilation-unit name="parquet-cover-data-types">
+        <output-dir compare="Text">parquet-cover-data-types</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to">
+      <compilation-unit name="parquet-empty-array">
+        <output-dir compare="Text">parquet-empty-array</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to">
       <compilation-unit name="empty-path">
         <output-dir compare="Text">empty-path</output-dir>
       </compilation-unit>
@@ -79,12 +114,31 @@
     <test-case FilePath="copy-to/negative">
       <compilation-unit name="supported-adapter-format-compression">
         <output-dir compare="Text">supported-adapter-format-compression</output-dir>
-        <expected-error>ASX1188: Unsupported writing adapter 'AZUREBLOB'. Supported adapters: [gcs, localfs, s3]</expected-error>
-        <expected-error>ASX1189: Unsupported writing format 'csv'. Supported formats: [json]</expected-error>
-        <expected-error>ASX1096: Unknown compression scheme rar. Supported schemes are [gzip]</expected-error>
+        <expected-error>ASX1188: Unsupported writing adapter 'AZUREBLOB'. Supported adapters: [gcs, hdfs, localfs, s3]</expected-error>
+        <expected-error>ASX1189: Unsupported writing format 'avro'. Supported formats: [csv, json, parquet]</expected-error>
+        <expected-error>ASX1202: Unsupported compression scheme rar. Supported schemes for json are [gzip]</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="copy-to/negative">
+      <compilation-unit name="parquet-error-checks">
+        <output-dir compare="Text">parquet-error-checks</output-dir>
+        <expected-error>ASX0037: Type mismatch: expected value of type BINARY, but got the value of type bigint</expected-error>
+        <expected-error>HYR0132: Extra field in the result, field 'second' does not exist at 'nested' in the schema</expected-error>
+        <expected-error>HYR0131: Result does not follow the schema, group type expected but found primitive type at 'nested'</expected-error>
+        <expected-error>HYR0131: Result does not follow the schema, primitive type expected but found group type at 'name'</expected-error>
+        <expected-error>ASX1206: Storage units expected for the field 'row-group-size' (e.g., 0.1KB, 100kb, 1mb, 3MB, 8.5GB ...). Provided 'random'</expected-error>
+        <expected-error>ASX1206: Storage units expected for the field 'page-size' (e.g., 0.1KB, 100kb, 1mb, 3MB, 8.5GB ...). Provided 'random'</expected-error>
+        <expected-error>ASX1202: Unsupported compression scheme rar. Supported schemes for parquet are [gzip, snappy, zstd]</expected-error>
+        <expected-error>ASX1001: Syntax error</expected-error>
+        <expected-error>ASX1204: 'binary' type not supported in parquet format</expected-error>
+        <expected-error>ASX1205: Invalid Parquet Writer Version provided '3'. Supported values: [1, 2]</expected-error>
+        <expected-error>ASX0039: Expected integer value, got yvghc (in line 22, at column 6)</expected-error>
+        <expected-error>ASX1209: Maximum value allowed for 'max-schemas' is 10. Found 15</expected-error>
+        <expected-error>HYR0133: Schema could not be inferred, empty types found in the result</expected-error>
+        <expected-error>HYR0134: Schema Limit exceeded, maximum number of heterogeneous schemas allowed : '2'</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to/negative">
       <compilation-unit name="empty-over">
         <output-dir compare="Text">empty-over</output-dir>
         <expected-error>ASX1001: Syntax error: OVER-clause cannot be empty</expected-error>
@@ -97,6 +151,47 @@
         <expected-error>Expected integer value, got hello</expected-error>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="copy-to/negative">
+      <compilation-unit name="csv-error-checks">
+        <output-dir compare="Text">csv-error-checks</output-dir>
+        <expected-error>ASX1079: Compilation error: TYPE/AS Expression is required for csv format</expected-error>
+        <expected-error>ASX1082: Cannot find datatype with name wrongDataType (in line 27, at column 4)</expected-error>
+        <expected-error>ASX3124: 'ABCD' is not a valid quote. The length of a quote should be 1</expected-error>
+        <expected-error>ASX3049: 'wrongDelimiter' is not a valid delimiter. The length of a delimiter should be 1</expected-error>
+        <expected-error>ASX3126: 'wrongEscape' is not a valid escape. The length of a escape should be 1</expected-error>
+        <expected-error>ASX3125: 'ABCD' is not a valid force-quote input. The length of a force-quote input should be 1 character</expected-error>
+        <expected-error>ASX1207: 'object' type not supported in csv format</expected-error>
+        <expected-error>ASX1207: 'array' type not supported in csv format</expected-error>
+        <expected-error>Syntax error: Both 'TYPE()' and 'AS()' are provided. Please use either 'TYPE()' or 'AS()'.</expected-error>
+      </compilation-unit>
+    </test-case>
+  </test-group>
+  <test-group name="copy-to/csv">
+    <test-case FilePath="copy-to/csv">
+      <compilation-unit name="simple-csv">
+        <output-dir compare="Text">simple-csv</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to/csv">
+      <compilation-unit name="type-mismatch">
+        <output-dir compare="Text">type-mismatch</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to/csv">
+      <compilation-unit name="delimiter">
+        <output-dir compare="Text">delimiter</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to/csv">
+      <compilation-unit name="header">
+        <output-dir compare="Text">header</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to/csv">
+      <compilation-unit name="quote-escape">
+        <output-dir compare="Text">quote-escape</output-dir>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="aws-s3-external-dataset">
     <test-case FilePath="external-dataset">
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp_hdfs.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp_hdfs.xml
index 9e39211..d14746a 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp_hdfs.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp_hdfs.xml
@@ -66,4 +66,31 @@
       </test-case>
       -->
   </test-group>
+  <test-group name="copy-to">
+    <test-case FilePath="copy-to-hdfs">
+      <compilation-unit name="parquet-simple">
+        <output-dir compare="Text">parquet-simple</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to-hdfs">
+      <compilation-unit name="parquet-tweet">
+        <output-dir compare="Text">parquet-tweet</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to-hdfs">
+      <compilation-unit name="parquet-utf8">
+        <output-dir compare="Text">parquet-utf8</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to-hdfs">
+      <compilation-unit name="parquet-cover-data-types">
+        <output-dir compare="Text">parquet-cover-data-types</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="copy-to-hdfs">
+      <compilation-unit name="parquet-empty-array">
+        <output-dir compare="Text">parquet-empty-array</output-dir>
+      </compilation-unit>
+    </test-case>
+  </test-group>
 </test-suite>
diff --git a/asterixdb/asterix-cloud/pom.xml b/asterixdb/asterix-cloud/pom.xml
index 659a6d7..9244979 100644
--- a/asterixdb/asterix-cloud/pom.xml
+++ b/asterixdb/asterix-cloud/pom.xml
@@ -16,181 +16,258 @@
  ! specific language governing permissions and limitations
  ! under the License.
  !-->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <artifactId>apache-asterixdb</artifactId>
-    <groupId>org.apache.asterix</groupId>
-    <version>0.9.10-SNAPSHOT</version>
-  </parent>
-  <artifactId>asterix-cloud</artifactId>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <artifactId>apache-asterixdb</artifactId>
+        <groupId>org.apache.asterix</groupId>
+        <version>0.9.10-SNAPSHOT</version>
+    </parent>
+    <artifactId>asterix-cloud</artifactId>
 
-  <licenses>
-    <license>
-      <name>Apache License, Version 2.0</name>
-      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-      <distribution>repo</distribution>
-      <comments>A business-friendly OSS license</comments>
-    </license>
-  </licenses>
+    <licenses>
+        <license>
+            <name>Apache License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+            <comments>A business-friendly OSS license</comments>
+        </license>
+    </licenses>
 
-  <properties>
-    <root.dir>${basedir}/..</root.dir>
-  </properties>
+    <properties>
+        <root.dir>${basedir}/..</root.dir>
+    </properties>
 
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.rat</groupId>
-        <artifactId>apache-rat-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default</id>
-            <phase>validate</phase>
-            <goals>
-              <goal>check</goal>
-            </goals>
-            <configuration>
-              <licenses>
-                <license implementation="org.apache.rat.analysis.license.ApacheSoftwareLicense20"/>
-              </licenses>
-              <excludes combine.children="append">
-                <exclude>src/test/resources/result/**</exclude>
-              </excludes>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>com.googlecode.maven-download-plugin</groupId>
-        <artifactId>download-maven-plugin</artifactId>
-        <version>1.4.2</version>
-        <executions>
-          <execution>
-            <id>install-fake-gcs</id>
-            <phase>${gcs.download.stage}</phase>
-            <goals>
-              <goal>wget</goal>
-            </goals>
-            <configuration>
-              <url>https://github.com/fsouza/fake-gcs-server/releases/download/v1.48.0/fake-gcs-server_1.48.0_Linux_amd64.tar.gz</url>
-              <outputFileName>fake-gcs-server_1.48.0_Linux_amd64.tar.gz</outputFileName>
-              <outputDirectory>${project.build.directory}</outputDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-antrun-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>extract-gcs</id>
-            <phase>${gcs.install.stage}</phase>
-            <configuration>
-              <target>
-                <echo message="Extracting fake-gcs-server" />
-                <mkdir dir="${project.build.directory}/fake-gcs-server" />
-                <gunzip src="${project.build.directory}/fake-gcs-server_1.48.0_Linux_amd64.tar.gz" dest="${project.build.directory}/fake-gcs-server_1.48.0_Linux_amd64.tar" />
-                <untar src="${project.build.directory}/fake-gcs-server_1.48.0_Linux_amd64.tar" dest="${project.build.directory}/fake-gcs-server" />
-                <chmod file="${project.build.directory}/fake-gcs-server/fake-gcs-server" perm="ugo+rx" />
-              </target>
-            </configuration>
-            <goals>
-              <goal>run</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-      <groupId>org.codehaus.mojo</groupId>
-      <artifactId>exec-maven-plugin</artifactId>
-        <executions>
-          <execution>
-          <id>fake-gcs-server</id>
-          <phase>${gcs.stage}</phase>
-          <goals>
-            <goal>exec</goal>
-          </goals>
-          <configuration>
-            <executable>${project.build.directory}/fake-gcs-server/fake-gcs-server</executable>
-            <workingDirectory>${project.build.directory}/fake-gcs-server</workingDirectory>
-            <arguments>
-              <argument>-port</argument>
-              <argument>4443</argument>
-              <argument>-scheme</argument>
-              <argument>http</argument>
-              <argument>-host</argument>
-              <argument>127.0.0.1</argument>
-              <argument>-log-level</argument>
-              <argument>error</argument>
-              <argument>-filesystem-root</argument>
-              <argument>${project.build.directory}/fake-gcs-server/storage</argument>
-            </arguments>
-            <async>true</async>
-          </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.rat</groupId>
+                <artifactId>apache-rat-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>default</id>
+                        <phase>validate</phase>
+                        <goals>
+                            <goal>check</goal>
+                        </goals>
+                        <configuration>
+                            <licenses>
+                                <license implementation="org.apache.rat.analysis.license.ApacheSoftwareLicense20"/>
+                            </licenses>
+                            <excludes combine.children="append">
+                                <exclude>src/test/resources/result/**</exclude>
+                            </excludes>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>com.googlecode.maven-download-plugin</groupId>
+                <artifactId>download-maven-plugin</artifactId>
+                <version>1.4.2</version>
+                <executions>
+                    <execution>
+                        <id>install-fake-gcs</id>
+                        <phase>${gcs.download.stage}</phase>
+                        <goals>
+                            <goal>wget</goal>
+                        </goals>
+                        <configuration>
+                            <url>
+                                https://github.com/fsouza/fake-gcs-server/releases/download/v1.48.0/fake-gcs-server_1.48.0_Linux_amd64.tar.gz
+                            </url>
+                            <outputFileName>fake-gcs-server_1.48.0_Linux_amd64.tar.gz</outputFileName>
+                            <outputDirectory>${project.build.directory}</outputDirectory>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-antrun-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>extract-gcs</id>
+                        <phase>${gcs.install.stage}</phase>
+                        <configuration>
+                            <target>
+                                <echo message="Extracting fake-gcs-server"/>
+                                <mkdir dir="${project.build.directory}/fake-gcs-server"/>
+                                <gunzip src="${project.build.directory}/fake-gcs-server_1.48.0_Linux_amd64.tar.gz"
+                                        dest="${project.build.directory}/fake-gcs-server_1.48.0_Linux_amd64.tar"/>
+                                <untar src="${project.build.directory}/fake-gcs-server_1.48.0_Linux_amd64.tar"
+                                       dest="${project.build.directory}/fake-gcs-server"/>
+                                <chmod file="${project.build.directory}/fake-gcs-server/fake-gcs-server" perm="ugo+rx"/>
+                            </target>
+                        </configuration>
+                        <goals>
+                            <goal>run</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>exec-maven-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>fake-gcs-server</id>
+                        <phase>${gcs.stage}</phase>
+                        <goals>
+                            <goal>exec</goal>
+                        </goals>
+                        <configuration>
+                            <executable>${project.build.directory}/fake-gcs-server/fake-gcs-server</executable>
+                            <workingDirectory>${project.build.directory}/fake-gcs-server</workingDirectory>
+                            <arguments>
+                                <argument>-port</argument>
+                                <argument>4443</argument>
+                                <argument>-scheme</argument>
+                                <argument>http</argument>
+                                <argument>-host</argument>
+                                <argument>127.0.0.1</argument>
+                                <argument>-log-level</argument>
+                                <argument>error</argument>
+                                <argument>-filesystem-root</argument>
+                                <argument>${project.build.directory}/fake-gcs-server/storage</argument>
+                            </arguments>
+                            <async>true</async>
+                        </configuration>
+                    </execution>
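+                    <!-- Runs the Azurite blob emulator (installed via the azurite-tests profile) on port 15055 -->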
+                    <execution>
+                        <id>azurite</id>
+                        <phase>${azurite.stage}</phase>
+                        <goals>
+                            <goal>exec</goal>
+                        </goals>
+                        <configuration>
+                            <!--suppress UnresolvedMavenProperty -->
+                            <executable>${project.build.directory}/npm/node_modules/.bin/azurite-blob</executable>
+                            <workingDirectory>${project.build.directory}</workingDirectory>
+                            <environmentVariables>
+                                <PATH>${project.build.directory}/npm/node</PATH>
+                            </environmentVariables>
+                            <arguments>
+                                <argument>--blobPort</argument>
+                                <argument>15055</argument>
+                                <argument>--location</argument>
+                                <argument>${project.build.directory}/azurite</argument>
+                                <argument>--debug</argument>
+                                <argument>${project.build.directory}/azurite/logs/azurite-debug.log</argument>
+                            </arguments>
+                            <async>true</async>
+                            <outputFile>${project.build.directory}/azurite/logs/azurite.log</outputFile>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
 
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-cloud</artifactId>
-      <version>${hyracks.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.asterix</groupId>
-      <artifactId>asterix-common</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.asterix</groupId>
-      <artifactId>asterix-external-data</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <!-- aws s3 start -->
-    <dependency>
-      <groupId>software.amazon.awssdk</groupId>
-      <artifactId>sdk-core</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>software.amazon.awssdk</groupId>
-      <artifactId>s3</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>software.amazon.awssdk</groupId>
-      <artifactId>regions</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>software.amazon.awssdk</groupId>
-      <artifactId>auth</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>software.amazon.awssdk</groupId>
-      <artifactId>s3-transfer-manager</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>software.amazon.awssdk.crt</groupId>
-      <artifactId>aws-crt</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>io.findify</groupId>
-      <artifactId>s3mock_2.12</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.typesafe.akka</groupId>
-      <artifactId>akka-http-core_2.12</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <!-- aws s3 end -->
-  </dependencies>
+    <profiles>
+        <profile>
+            <id>azurite-tests</id>
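+            <!-- Installs a local Node.js/npm and the azurite package consumed by the azurite exec execution -->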
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>com.github.eirslett</groupId>
+                        <artifactId>frontend-maven-plugin</artifactId>
+                        <version>1.13.4</version>
+                        <configuration>
+                            <nodeVersion>v14.15.4</nodeVersion>
+                            <npmVersion>6.14.11</npmVersion>
+                            <workingDirectory>target/npm</workingDirectory>
+                            <installDirectory>target/npm</installDirectory>
+                        </configuration>
+                        <executions>
+                            <execution>
+                                <id>install node and npm</id>
+                                <goals>
+                                    <goal>install-node-and-npm</goal>
+                                </goals>
+                                <phase>${azurite.npm.install.stage}</phase>
+                            </execution>
+                            <execution>
+                                <id>azurite blob</id>
+                                <phase>${azurite.install.stage}</phase>
+                                <goals>
+                                    <goal>npm</goal>
+                                </goals>
+                                <configuration>
+                                    <arguments>install azurite</arguments>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.hyracks</groupId>
+            <artifactId>hyracks-cloud</artifactId>
+            <version>${hyracks.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.asterix</groupId>
+            <artifactId>asterix-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.asterix</groupId>
+            <artifactId>asterix-external-data</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <!-- aws s3 start -->
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>sdk-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>s3</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>regions</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>auth</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>s3-transfer-manager</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>software.amazon.awssdk.crt</groupId>
+            <artifactId>aws-crt</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>io.findify</groupId>
+            <artifactId>s3mock_2.12</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.typesafe.akka</groupId>
+            <artifactId>akka-http-core_2.12</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <!-- aws s3 end -->
+
+        <dependency>
+            <groupId>com.azure</groupId>
+            <artifactId>azure-storage-blob-batch</artifactId>
+            <version>12.23.0</version>
+        </dependency>
+
+    </dependencies>
 </project>
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/AbstractCloudIOManager.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/AbstractCloudIOManager.java
index c005253..4ce382e 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/AbstractCloudIOManager.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/AbstractCloudIOManager.java
@@ -26,10 +26,12 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
+import java.nio.file.FileStore;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.function.Predicate;
 
 import org.apache.asterix.cloud.bulk.DeleteBulkCloudOperation;
 import org.apache.asterix.cloud.bulk.NoOpDeleteBulkCallBack;
@@ -51,6 +53,7 @@
 import org.apache.hyracks.api.io.IIOBulkOperation;
 import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.api.util.IoUtil;
+import org.apache.hyracks.cloud.filesystem.PhysicalDrive;
 import org.apache.hyracks.cloud.io.ICloudIOManager;
 import org.apache.hyracks.cloud.io.request.ICloudBeforeRetryRequest;
 import org.apache.hyracks.cloud.io.request.ICloudRequest;
@@ -68,6 +71,8 @@
 public abstract class AbstractCloudIOManager extends IOManager implements IPartitionBootstrapper, ICloudIOManager {
     private static final Logger LOGGER = LogManager.getLogger();
     private static final byte[] EMPTY_FILE_BYTES = "empty".getBytes();
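+    // Accepts every relative path; used when the total size of all files is requested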
+    private static final Predicate<String> NO_OP_LIST_FILES_FILTER = (path) -> true;
+
     protected final ICloudClient cloudClient;
     protected final ICloudGuardian guardian;
     protected final IWriteBufferProvider writeBufferProvider;
@@ -76,6 +81,7 @@
     protected final List<FileReference> partitionPaths;
     protected final IOManager localIoManager;
     protected final INamespacePathResolver nsPathResolver;
+    private final List<FileStore> drivePaths;
 
     public AbstractCloudIOManager(IOManager ioManager, CloudProperties cloudProperties,
             INamespacePathResolver nsPathResolver, ICloudGuardian guardian) throws HyracksDataException {
@@ -90,6 +96,7 @@
         partitions = new HashSet<>();
         partitionPaths = new ArrayList<>();
         this.localIoManager = ioManager;
+        drivePaths = PhysicalDrive.getDrivePaths(ioDevices);
     }
 
     /*
@@ -468,10 +475,24 @@
     }
 
     public long getTotalRemoteStorageSizeForNodeBytes() {
-        long size = 0;
-        for (CloudFile file : list()) {
-            size += file.getSize();
+        return getSize(NO_OP_LIST_FILES_FILTER);
+    }
+
+    @Override
+    public long getSize(Predicate<String> relativePathFilter) {
+        long totalSize = localIoManager.getSize(relativePathFilter);
+
+        // Add the sizes of files that exist only in the cloud (not cached locally), per the uncached-file tracker
+        for (UncachedFileReference uncachedFile : getUncachedFiles()) {
+            if (relativePathFilter.test(uncachedFile.getRelativePath())) {
+                totalSize += uncachedFile.getSize();
+            }
         }
-        return size;
+        return totalSize;
+    }
+
+    @Override
+    public long getTotalDiskUsage() {
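+        // Physical used space across the IO devices' file stores, as opposed to the logical size from getSize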
+        return PhysicalDrive.getUsedSpace(drivePaths);
     }
 }
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/CloudClientProvider.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/CloudClientProvider.java
index ee43a2c..c98c6b4 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/CloudClientProvider.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/CloudClientProvider.java
@@ -20,6 +20,8 @@
 
 import org.apache.asterix.cloud.clients.aws.s3.S3ClientConfig;
 import org.apache.asterix.cloud.clients.aws.s3.S3CloudClient;
+import org.apache.asterix.cloud.clients.azure.blobstorage.AzBlobStorageClientConfig;
+import org.apache.asterix.cloud.clients.azure.blobstorage.AzBlobStorageCloudClient;
 import org.apache.asterix.cloud.clients.google.gcs.GCSClientConfig;
 import org.apache.asterix.cloud.clients.google.gcs.GCSCloudClient;
 import org.apache.asterix.common.config.CloudProperties;
@@ -30,6 +32,7 @@
     private static final boolean UNSTABLE = isUnstable();
     public static final String S3 = "s3";
     public static final String GCS = "gs";
+    public static final String AZ_BLOB = "azblob";
 
     private CloudClientProvider() {
         throw new AssertionError("do not instantiate");
@@ -45,6 +48,9 @@
         } else if (GCS.equalsIgnoreCase(storageScheme)) {
             GCSClientConfig config = GCSClientConfig.of(cloudProperties);
             cloudClient = new GCSCloudClient(config, guardian);
+        } else if (AZ_BLOB.equalsIgnoreCase(storageScheme)) {
+            AzBlobStorageClientConfig config = AzBlobStorageClientConfig.of(cloudProperties);
+            cloudClient = new AzBlobStorageCloudClient(config, guardian);
         } else {
             throw new IllegalStateException("unsupported cloud storage scheme: " + storageScheme);
         }
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageBufferedWriter.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageBufferedWriter.java
new file mode 100644
index 0000000..2a79c86
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageBufferedWriter.java
@@ -0,0 +1,166 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.cloud.clients.azure.blobstorage;
+
+import java.io.BufferedInputStream;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Base64;
+import java.util.List;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.asterix.cloud.clients.ICloudBufferedWriter;
+import org.apache.asterix.cloud.clients.ICloudGuardian;
+import org.apache.asterix.cloud.clients.profiler.IRequestProfilerLimiter;
+import org.apache.commons.io.IOUtils;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import com.azure.core.util.BinaryData;
+import com.azure.storage.blob.BlobClient;
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.models.BlobStorageException;
+import com.azure.storage.blob.specialized.BlockBlobClient;
+
+public class AzBlobStorageBufferedWriter implements ICloudBufferedWriter {
+    private static final String PUT_UPLOAD_ID = "putUploadId";
+    private static final int MAX_RETRIES = 3;
+    private static final Logger LOGGER = LogManager.getLogger();
+    private final List<String> blockIDArrayList;
+    private final ICloudGuardian guardian;
+    private int blockNumber;
+    private final String path;
+    private String uploadID;
+
+    private final BlobContainerClient blobContainerClient;
+
+    private final IRequestProfilerLimiter profiler;
+
+    private final String bucket;
+
+    public AzBlobStorageBufferedWriter(BlobContainerClient blobContainerClient, IRequestProfilerLimiter profiler,
+            ICloudGuardian guardian, String bucket, String path) {
+        this.blobContainerClient = blobContainerClient;
+        this.profiler = profiler;
+        this.guardian = guardian;
+        this.bucket = bucket;
+        this.path = path;
+        this.blockIDArrayList = new ArrayList<>();
+    }
+
+    @Override
+    public void upload(InputStream stream, int length) {
+        profiler.objectMultipartUpload();
+        if (length <= 0) {
+            String errMsg = String.format("A block with size %d cannot be staged for upload", length);
+            LOGGER.error(errMsg);
+            throw new IllegalArgumentException(errMsg);
+        }
+        guardian.checkIsolatedWriteAccess(bucket, path);
+        try {
+            BlockBlobClient blockBlobClient = blobContainerClient.getBlobClient(path).getBlockBlobClient();
+            BufferedInputStream bufferedInputStream = IOUtils.buffer(stream, length);
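+            // Azure requires block IDs to be Base64-encoded and of equal length within a blob; a random UUID string satisfies both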
+            String blockID =
+                    Base64.getEncoder().encodeToString(UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8));
+            initBlockBlobUploads(blockID);
+            blockIDArrayList.add(blockID);
+            blockBlobClient.stageBlock(blockID, bufferedInputStream, length);
+        } catch (Exception e) {
+            LOGGER.error("Error while uploading blocks of data: {}", e.getMessage());
+            throw new RuntimeException(e);
+        }
+        blockNumber++;
+    }
+
+    private void initBlockBlobUploads(String blockID) {
+        if (this.uploadID == null) {
+            this.uploadID = blockID;
+            this.blockNumber = 1;
+        }
+    }
+
+    @Override
+    public void uploadLast(InputStream stream, ByteBuffer buffer) throws HyracksDataException {
+        if (uploadID == null) {
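+            // No blocks were staged: the content fits in a single buffer, so one put-object call suffices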
+            profiler.objectWrite();
+            BlobClient blobClient = blobContainerClient.getBlobClient(path);
+            BinaryData binaryData = BinaryData.fromBytes(getDataFromBuffer(buffer));
+            blobClient.upload(binaryData);
+            uploadID = PUT_UPLOAD_ID; // uploadID should be updated if the put-object operation succeeds
+        } else {
+            upload(stream, buffer.limit());
+        }
+    }
+
+    private byte[] getDataFromBuffer(ByteBuffer buffer) {
+        byte[] data = new byte[buffer.limit()];
+        buffer.get(data, 0, buffer.limit());
+        return data;
+    }
+
+    @Override
+    public boolean isEmpty() {
+        return this.uploadID == null;
+    }
+
+    @Override
+    public void finish() throws HyracksDataException {
+        if (this.uploadID == null) {
+            throw new IllegalStateException("Cannot finish without writing any bytes");
+        } else if (PUT_UPLOAD_ID.equals(uploadID)) {
+            return;
+        }
+        int currRetryAttempt = 0;
+        BlockBlobClient blockBlobClient = blobContainerClient.getBlobClient(path).getBlockBlobClient();
+        while (true) {
+            try {
+                guardian.checkWriteAccess(bucket, path);
+                profiler.objectMultipartUpload();
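+                // Committing the block list atomically publishes the staged blocks as the blob's content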
+                blockBlobClient.commitBlockList(blockIDArrayList);
+                break;
+            } catch (BlobStorageException e) {
+                currRetryAttempt++;
+                if (currRetryAttempt == MAX_RETRIES) {
+                    throw HyracksDataException.create(e);
+                }
+                LOGGER.info(() -> "AzBlob storage write retry, encountered: " + e.getMessage());
+
+                // Back off 1 second on the first retry attempt and 2 seconds on each subsequent one
+                try {
+                    Thread.sleep(TimeUnit.SECONDS.toMillis(currRetryAttempt < 2 ? 1 : 2));
+                } catch (InterruptedException ex) {
+                    Thread.currentThread().interrupt();
+                    throw HyracksDataException.create(ex);
+                }
+            }
+        }
+    }
+
+    @Override
+    public void abort() throws HyracksDataException {
+        // TODO: the current Azure Java SDK offers no way to abort (discard) staged, uncommitted blocks.
+        // https://github.com/Azure/azure-sdk-for-java/issues/31150
+        LOGGER.warn("Multipart upload for {} was aborted; any staged blocks remain uncommitted", path);
+    }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageClientConfig.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageClientConfig.java
new file mode 100644
index 0000000..9aedfc3
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageClientConfig.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.cloud.clients.azure.blobstorage;
+
+import java.util.Objects;
+
+import org.apache.asterix.common.config.CloudProperties;
+
+import com.azure.identity.DefaultAzureCredential;
+import com.azure.identity.DefaultAzureCredentialBuilder;
+
+public class AzBlobStorageClientConfig {
+    private final int writeBufferSize;
+    // Ref: https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch?tabs=microsoft-entra-id
+    static final int DELETE_BATCH_SIZE = 256;
+    private final String region;
+    private final String endpoint;
+    private final String prefix;
+
+    private final boolean anonymousAuth;
+    private final long profilerLogInterval;
+    private final String bucket;
+    private final long tokenAcquireTimeout;
+    private final int writeMaxRequestsPerSeconds;
+    private final int readMaxRequestsPerSeconds;
+
+    public AzBlobStorageClientConfig(String region, String endpoint, String prefix, boolean anonymousAuth,
+            long profilerLogInterval, String bucket, long tokenAcquireTimeout, int writeMaxRequestsPerSeconds,
+            int readMaxRequestsPerSeconds, int writeBufferSize) {
+        this.region = Objects.requireNonNull(region, "region");
+        this.endpoint = endpoint;
+        this.prefix = Objects.requireNonNull(prefix, "prefix");
+        this.anonymousAuth = anonymousAuth;
+        this.profilerLogInterval = profilerLogInterval;
+        this.bucket = bucket;
+        this.tokenAcquireTimeout = tokenAcquireTimeout;
+        this.writeMaxRequestsPerSeconds = writeMaxRequestsPerSeconds;
+        this.readMaxRequestsPerSeconds = readMaxRequestsPerSeconds;
+        this.writeBufferSize = writeBufferSize;
+    }
+
+    public static AzBlobStorageClientConfig of(CloudProperties cloudProperties) {
+        return new AzBlobStorageClientConfig(cloudProperties.getStorageRegion(), cloudProperties.getStorageEndpoint(),
+                cloudProperties.getStoragePrefix(), cloudProperties.isStorageAnonymousAuth(),
+                cloudProperties.getProfilerLogInterval(), cloudProperties.getStorageBucket(),
+                cloudProperties.getTokenAcquireTimeout(), cloudProperties.getWriteMaxRequestsPerSecond(),
+                cloudProperties.getReadMaxRequestsPerSecond(), cloudProperties.getWriteBufferSize());
+    }
+
+    public String getRegion() {
+        return region;
+    }
+
+    public String getEndpoint() {
+        return endpoint;
+    }
+
+    public String getPrefix() {
+        return prefix;
+    }
+
+    public String getBucket() {
+        return bucket;
+    }
+
+    public long getProfilerLogInterval() {
+        return profilerLogInterval;
+    }
+
+    public boolean isAnonymousAuth() {
+        return anonymousAuth;
+    }
+
+    public DefaultAzureCredential createCredentialsProvider() {
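+        // DefaultAzureCredential resolves credentials through the standard chain (environment, managed identity, Azure CLI, ...)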
+        return new DefaultAzureCredentialBuilder().build();
+    }
+
+    public long getTokenAcquireTimeout() {
+        return tokenAcquireTimeout;
+    }
+
+    public int getWriteMaxRequestsPerSeconds() {
+        return writeMaxRequestsPerSeconds;
+    }
+
+    public int getReadMaxRequestsPerSeconds() {
+        return readMaxRequestsPerSeconds;
+    }
+
+    public int getWriteBufferSize() {
+        return writeBufferSize;
+    }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageCloudClient.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageCloudClient.java
new file mode 100644
index 0000000..b9f9421
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageCloudClient.java
@@ -0,0 +1,402 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.cloud.clients.azure.blobstorage;
+
+import static org.apache.asterix.cloud.clients.azure.blobstorage.AzBlobStorageClientConfig.DELETE_BATCH_SIZE;
+
+import java.io.ByteArrayOutputStream;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.BufferOverflowException;
+import java.nio.ByteBuffer;
+import java.nio.ReadOnlyBufferException;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.apache.asterix.cloud.CloudResettableInputStream;
+import org.apache.asterix.cloud.IWriteBufferProvider;
+import org.apache.asterix.cloud.clients.CloudFile;
+import org.apache.asterix.cloud.clients.ICloudBufferedWriter;
+import org.apache.asterix.cloud.clients.ICloudClient;
+import org.apache.asterix.cloud.clients.ICloudGuardian;
+import org.apache.asterix.cloud.clients.ICloudWriter;
+import org.apache.asterix.cloud.clients.IParallelDownloader;
+import org.apache.asterix.cloud.clients.profiler.CountRequestProfilerLimiter;
+import org.apache.asterix.cloud.clients.profiler.IRequestProfilerLimiter;
+import org.apache.asterix.cloud.clients.profiler.RequestLimiterNoOpProfiler;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.io.FileReference;
+import org.apache.hyracks.control.nc.io.IOManager;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import com.azure.core.http.rest.PagedIterable;
+import com.azure.core.util.BinaryData;
+import com.azure.storage.blob.BlobClient;
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.BlobContainerClientBuilder;
+import com.azure.storage.blob.batch.BlobBatchClient;
+import com.azure.storage.blob.batch.BlobBatchClientBuilder;
+import com.azure.storage.blob.models.BlobErrorCode;
+import com.azure.storage.blob.models.BlobItem;
+import com.azure.storage.blob.models.BlobListDetails;
+import com.azure.storage.blob.models.BlobRange;
+import com.azure.storage.blob.models.BlobStorageException;
+import com.azure.storage.blob.models.ListBlobsOptions;
+import com.azure.storage.common.StorageSharedKeyCredential;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+public class AzBlobStorageCloudClient implements ICloudClient {
+    private static final String BUCKET_ROOT_PATH = "";
+    public static final String AZURITE_ENDPOINT = "http://127.0.0.1:15055/devstoreaccount1/";
+    private static final String AZURITE_ACCOUNT_NAME = "devstoreaccount1";
+    private static final String AZURITE_ACCOUNT_KEY =
+            "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==";
+    private final ICloudGuardian guardian;
+    private BlobContainerClient blobContainerClient;
+    private AzBlobStorageClientConfig config;
+    private IRequestProfilerLimiter profiler;
+    private final BlobBatchClient blobBatchClient;
+    private static final Logger LOGGER = LogManager.getLogger();
+
+    public AzBlobStorageCloudClient(AzBlobStorageClientConfig config, ICloudGuardian guardian) {
+        this(config, buildClient(config), guardian);
+    }
+
+    public AzBlobStorageCloudClient(AzBlobStorageClientConfig config, BlobContainerClient blobContainerClient,
+            ICloudGuardian guardian) {
+        this.blobContainerClient = blobContainerClient;
+        this.config = config;
+        this.guardian = guardian;
+        long profilerInterval = config.getProfilerLogInterval();
+        AzureRequestRateLimiter limiter = new AzureRequestRateLimiter(config);
+        if (profilerInterval > 0) {
+            profiler = new CountRequestProfilerLimiter(profilerInterval, limiter);
+        } else {
+            profiler = new RequestLimiterNoOpProfiler(limiter);
+        }
+        guardian.setCloudClient(this);
+        blobBatchClient = new BlobBatchClientBuilder(blobContainerClient.getServiceClient()).buildClient();
+    }
+
+    @Override
+    public int getWriteBufferSize() {
+        return config.getWriteBufferSize();
+    }
+
+    @Override
+    public IRequestProfilerLimiter getProfilerLimiter() {
+        return profiler;
+    }
+
+    @Override
+    public ICloudWriter createWriter(String bucket, String path, IWriteBufferProvider bufferProvider) {
+        ICloudBufferedWriter bufferedWriter = new AzBlobStorageBufferedWriter(blobContainerClient, profiler, guardian,
+                bucket, config.getPrefix() + path);
+        return new CloudResettableInputStream(bufferedWriter, bufferProvider);
+    }
+
+    @Override
+    public Set<CloudFile> listObjects(String bucket, String path, FilenameFilter filter) {
+        guardian.checkReadAccess(bucket, path);
+        profiler.objectsList();
+        PagedIterable<BlobItem> blobItems = getBlobItems(bucket, config.getPrefix() + path);
+        Stream<CloudFile> cloudFileStream = mapBlobItemsToStreamOfCloudFiles(blobItems);
+        return filterCloudFiles(filter, cloudFileStream);
+    }
+
+    private Set<CloudFile> filterCloudFiles(FilenameFilter filter, Stream<CloudFile> cloudFileStream) {
+        if (filter == null) {
+            return cloudFileStream.map(this::removeCloudPrefixFromBlobName).collect(Collectors.toSet());
+        }
+        return cloudFileStream.filter(cloudFile -> filter.accept(null, cloudFile.getPath()))
+                .map(this::removeCloudPrefixFromBlobName).collect(Collectors.toSet());
+    }
+
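+    // Blob listings include the configured storage prefix; strip it so callers see bucket-relative paths.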
+    private CloudFile removeCloudPrefixFromBlobName(CloudFile cloudFile) {
+        String fullyQualifiedBlobName = cloudFile.getPath();
+        fullyQualifiedBlobName = fullyQualifiedBlobName.substring(config.getPrefix().length());
+        return CloudFile.of(fullyQualifiedBlobName, cloudFile.getSize());
+    }
+
+    private Stream<CloudFile> mapBlobItemsToStreamOfCloudFiles(PagedIterable<BlobItem> blobItems) {
+        return blobItems.stream()
+                .map(blobItem -> CloudFile.of(blobItem.getName(), blobItem.getProperties().getContentLength()));
+    }
+
+    private PagedIterable<BlobItem> getBlobItems(String bucket, String path) {
+        ListBlobsOptions options =
+                new ListBlobsOptions().setPrefix(path).setDetails(new BlobListDetails().setRetrieveMetadata(true));
+        return blobContainerClient.listBlobs(options, null);
+    }
+
+    @Override
+    public int read(String bucket, String path, long offset, ByteBuffer buffer) throws HyracksDataException {
+        guardian.checkReadAccess(bucket, path);
+        profiler.objectGet();
+        BlobClient blobClient = blobContainerClient.getBlobClient(config.getPrefix() + path);
+        ByteArrayOutputStream blobStream = new ByteArrayOutputStream(buffer.capacity());
+        long rem = buffer.remaining();
+        BlobRange blobRange = new BlobRange(offset, rem);
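+        // Ranged download: read exactly the bytes the caller's buffer can hold, starting at the given offset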
+        downloadBlob(blobClient, blobStream, blobRange);
+        readBlobStreamIntoBuffer(buffer, blobStream);
+        if (buffer.remaining() != 0) {
+            throw new IllegalStateException("Expected buffer remaining = 0, found: " + buffer.remaining());
+        }
+        return (int) rem - buffer.remaining();
+    }
+
+    private void readBlobStreamIntoBuffer(ByteBuffer buffer, ByteArrayOutputStream byteArrayOutputStream)
+            throws HyracksDataException {
+        byte[] byteArray = byteArrayOutputStream.toByteArray();
+        try {
+            buffer.put(byteArray);
+            byteArrayOutputStream.close();
+        } catch (BufferOverflowException | ReadOnlyBufferException | IOException ex) {
+            throw HyracksDataException.create(ex);
+        }
+    }
+
+    private void downloadBlob(BlobClient blobClient, ByteArrayOutputStream byteArrayOutputStream, BlobRange blobRange)
+            throws HyracksDataException {
+        try {
+            blobClient.downloadStreamWithResponse(byteArrayOutputStream, blobRange, null, null, false, null, null);
+        } catch (BlobStorageException ex) {
+            throw HyracksDataException.create(ex);
+        }
+    }
+
+    @Override
+    public byte[] readAllBytes(String bucket, String path) throws HyracksDataException {
+        guardian.checkReadAccess(bucket, path);
+        profiler.objectGet();
+        BlobClient blobClient = blobContainerClient.getBlobClient(config.getPrefix() + path);
+        try {
+            BinaryData binaryData = blobClient.downloadContent();
+            return binaryData.toBytes();
+        } catch (BlobStorageException ex) {
+            BlobErrorCode errorCode = ex.getErrorCode();
+            if (errorCode.equals(BlobErrorCode.BLOB_NOT_FOUND)) {
+                LOGGER.warn("Blob not found on cloud: {}", path);
+                return null;
+            }
+            throw HyracksDataException.create(ex);
+        }
+    }
+
+    @Override
+    public InputStream getObjectStream(String bucket, String path, long offset, long length) {
+        guardian.checkReadAccess(bucket, path);
+        profiler.objectGet();
+        BlobRange blobRange = new BlobRange(offset, length);
+        BlobClient blobClient = blobContainerClient.getBlobClient(config.getPrefix() + path);
+        try {
+            return blobClient.openInputStream(blobRange, null);
+        } catch (BlobStorageException ex) {
+            LOGGER.error("error getting object stream for path: {}. Exception: {}", path, ex.getMessage());
+            throw new IllegalStateException(ex);
+        }
+    }
+
+    @Override
+    public void write(String bucket, String path, byte[] data) {
+        guardian.checkWriteAccess(bucket, path);
+        profiler.objectWrite();
+        BinaryData binaryData = BinaryData.fromBytes(data);
+        BlobClient blobClient = blobContainerClient.getBlobClient(config.getPrefix() + path);
+        blobClient.upload(binaryData, true);
+    }
+
+    @Override
+    public void copy(String bucket, String srcPath, FileReference destPath) {
+        guardian.checkReadAccess(bucket, srcPath);
+        profiler.objectGet();
+        BlobClient srcBlobClient = blobContainerClient.getBlobClient(config.getPrefix() + srcPath);
+        String srcBlobUrl = srcBlobClient.getBlobUrl();
+        profiler.objectCopy();
+        guardian.checkWriteAccess(bucket, destPath.getRelativePath());
+        BlobClient destBlobClient = blobContainerClient.getBlobClient(destPath.getFile().getPath());
+        destBlobClient.beginCopy(srcBlobUrl, null);
+    }
+
+    @Override
+    public void deleteObjects(String bucket, Collection<String> paths) {
+        if (paths.isEmpty()) {
+            return;
+        }
+        Set<BlobItem> blobsToDelete = getBlobsMatchingThesePaths(paths);
+        List<String> blobURLs = getBlobURLs(blobsToDelete);
+        if (blobURLs.isEmpty()) {
+            return;
+        }
+        Collection<List<String>> batchedBlobURLs = getBatchedBlobURLs(blobURLs);
+        for (List<String> batch : batchedBlobURLs) {
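+            // deleteBlobs returns a lazy PagedIterable; draining it forces the batch request and surfaces failures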
+            blobBatchClient.deleteBlobs(batch, null).stream().count();
+        }
+    }
+
+    private Collection<List<String>> getBatchedBlobURLs(List<String> blobURLs) {
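+        // Chunk blob URLs into groups of at most DELETE_BATCH_SIZE, the service limit for a single batch request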
+        int startIdx = 0;
+        Collection<List<String>> batchedBLOBURLs = new ArrayList<>();
+        Iterator<String> iterator = blobURLs.iterator();
+        while (iterator.hasNext()) {
+            List<String> batch = new ArrayList<>();
+            while (startIdx < DELETE_BATCH_SIZE && iterator.hasNext()) {
+                batch.add(iterator.next());
+                startIdx++;
+            }
+            batchedBLOBURLs.add(batch);
+            startIdx = 0;
+        }
+        return batchedBLOBURLs;
+    }
+
+    private Set<BlobItem> getBlobsMatchingThesePaths(Collection<String> paths) {
+        List<String> pathWithPrefix =
+                paths.stream().map(path -> config.getPrefix() + path).collect(Collectors.toList());
+        PagedIterable<BlobItem> blobItems = blobContainerClient.listBlobs();
+        return blobItems.stream().filter(blobItem -> pathWithPrefix.contains(blobItem.getName()))
+                .collect(Collectors.toSet());
+    }
+
+    @Override
+    public long getObjectSize(String bucket, String path) throws HyracksDataException {
+        guardian.checkReadAccess(bucket, path);
+        profiler.objectGet();
+        try {
+            BlobClient blobClient = blobContainerClient.getBlobClient(config.getPrefix() + path);
+            return blobClient.getProperties().getBlobSize();
+        } catch (BlobStorageException ex) {
+            BlobErrorCode errorCode = ex.getErrorCode();
+            if (errorCode.equals(BlobErrorCode.BLOB_NOT_FOUND)) {
+                LOGGER.error("error while getting blob size; no such blob found: {} ", config.getPrefix() + path);
+                return 0;
+            }
+            throw HyracksDataException.create(ex);
+        } catch (Exception ex) {
+            LOGGER.error("error getting size of the blob: {}. Exception: {}", path, ex.getMessage());
+            throw HyracksDataException.create(ex);
+        }
+    }
+
+    @Override
+    public boolean exists(String bucket, String path) throws HyracksDataException {
+        guardian.checkReadAccess(bucket, path);
+        profiler.objectGet();
+        try {
+            BlobClient blobClient = blobContainerClient.getBlobClient(config.getPrefix() + path);
+            return blobClient.exists();
+        } catch (BlobStorageException ex) {
+            BlobErrorCode errorCode = ex.getErrorCode();
+            if (errorCode.equals(BlobErrorCode.BLOB_NOT_FOUND)) {
+                return false;
+            }
+            throw HyracksDataException.create(ex);
+        } catch (Exception ex) {
+            throw HyracksDataException.create(ex);
+        }
+    }
+
+    @Override
+    public boolean isEmptyPrefix(String bucket, String path) throws HyracksDataException {
+        profiler.objectsList();
+        ListBlobsOptions listBlobsOptions = new ListBlobsOptions().setPrefix(config.getPrefix() + path);
+        // A null timeout means no client-side timeout is enforced on the listing call
+        // (Duration.ofDays(Long.MAX_VALUE) would overflow and throw an ArithmeticException)
+        PagedIterable<BlobItem> blobItems = blobContainerClient.listBlobs(listBlobsOptions, null);
+        return blobItems.stream().findAny().isEmpty();
+    }
+
+    @Override
+    public IParallelDownloader createParallelDownloader(String bucket, IOManager ioManager) {
+        return new AzureParallelDownloader(ioManager, blobContainerClient, profiler, config);
+    }
+
+    @Override
+    public JsonNode listAsJson(ObjectMapper objectMapper, String bucket) {
+        profiler.objectsList();
+        PagedIterable<BlobItem> blobItems = getBlobItems(bucket, BUCKET_ROOT_PATH);
+        List<BlobItem> blobs = blobItems.stream().distinct().collect(Collectors.toList());
+        blobs = sortBlobItemsByName(blobs);
+        return mapBlobItemsToJson(blobs, objectMapper);
+    }
+
+    private List<BlobItem> sortBlobItemsByName(List<BlobItem> blobs) {
+        return blobs.stream()
+                .sorted((blob1, blob2) -> String.CASE_INSENSITIVE_ORDER.compare(blob1.getName(), blob2.getName()))
+                .collect(Collectors.toList());
+    }
+
+    private ArrayNode mapBlobItemsToJson(List<BlobItem> blobs, ObjectMapper objectMapper) {
+        ArrayNode objectsInfo = objectMapper.createArrayNode();
+        for (BlobItem blob : blobs) {
+            ObjectNode objectInfo = objectsInfo.addObject();
+            objectInfo.put("path", blob.getName());
+            objectInfo.put("size", blob.getProperties().getContentLength());
+        }
+        return objectsInfo;
+    }
+
+    @Override
+    public void close() {
+        // Closing Azure Blob clients is not required: the SDK manages the underlying
+        // Netty connection pool on behalf of the application.
+        // Ref: https://github.com/Azure/azure-sdk-for-java/issues/17903
+        // Hence, this implementation is a no-op.
+    }
+
+    private static BlobContainerClient buildClient(AzBlobStorageClientConfig config) {
+        BlobContainerClientBuilder blobContainerClientBuilder =
+                new BlobContainerClientBuilder().containerName(config.getBucket()).endpoint(getEndpoint(config));
+        configCredentialsToAzClient(blobContainerClientBuilder, config);
+        BlobContainerClient blobContainerClient = blobContainerClientBuilder.buildClient();
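+        // Create the container on first use; this is a no-op if it already exists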
+        blobContainerClient.createIfNotExists();
+        return blobContainerClient;
+    }
+
+    private static void configCredentialsToAzClient(BlobContainerClientBuilder builder,
+            AzBlobStorageClientConfig config) {
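+        // Anonymous auth targets the local Azurite emulator, which uses the well-known devstoreaccount1 credentials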
+        if (config.isAnonymousAuth()) {
+            StorageSharedKeyCredential creds =
+                    new StorageSharedKeyCredential(AZURITE_ACCOUNT_NAME, AZURITE_ACCOUNT_KEY);
+            builder.credential(creds);
+        } else {
+            builder.credential(config.createCredentialsProvider());
+        }
+    }
+
+    private static String getEndpoint(AzBlobStorageClientConfig config) {
+        return config.isAnonymousAuth() ? AZURITE_ENDPOINT + config.getBucket()
+                : config.getEndpoint() + "/" + config.getBucket();
+    }
+
+    private List<String> getBlobURLs(Set<BlobItem> blobs) {
+        final String blobURLPrefix = blobContainerClient.getBlobContainerUrl() + "/";
+        return blobs.stream().map(BlobItem::getName).map(blobName -> blobURLPrefix + blobName)
+                .collect(Collectors.toList());
+    }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzureParallelDownloader.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzureParallelDownloader.java
new file mode 100644
index 0000000..4980587
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzureParallelDownloader.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.cloud.clients.azure.blobstorage;
+
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.InvalidPathException;
+import java.nio.file.Path;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.asterix.cloud.clients.IParallelDownloader;
+import org.apache.asterix.cloud.clients.profiler.IRequestProfilerLimiter;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.io.FileReference;
+import org.apache.hyracks.control.nc.io.IOManager;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import com.azure.core.http.rest.PagedIterable;
+import com.azure.storage.blob.BlobClient;
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.models.BlobItem;
+import com.azure.storage.blob.models.ListBlobsOptions;
+
+public class AzureParallelDownloader implements IParallelDownloader {
+    public static final String STORAGE_SUB_DIR = "storage";
+    private final IOManager ioManager;
+    private final BlobContainerClient blobContainerClient;
+    private final IRequestProfilerLimiter profiler;
+    private final AzBlobStorageClientConfig config;
+    private static final Logger LOGGER = LogManager.getLogger();
+
+    public AzureParallelDownloader(IOManager ioManager, BlobContainerClient blobContainerClient,
+            IRequestProfilerLimiter profiler, AzBlobStorageClientConfig config) {
+        this.ioManager = ioManager;
+        this.blobContainerClient = blobContainerClient;
+        this.profiler = profiler;
+        this.config = config;
+    }
+
+    @Override
+    public void downloadFiles(Collection<FileReference> toDownload) throws HyracksDataException {
+        for (FileReference fileReference : toDownload) {
+            BlobClient blobClient =
+                    blobContainerClient.getBlobClient(config.getPrefix() + fileReference.getRelativePath());
+            Path absPath = Path.of(fileReference.getAbsolutePath());
+            Path parentPath = absPath.getParent();
+            try {
+                createDirectories(parentPath);
+                // try-with-resources closes the stream exactly once, on success or failure
+                try (OutputStream fileOutputStream = Files.newOutputStream(absPath)) {
+                    blobClient.downloadStream(fileOutputStream);
+                }
+            } catch (IOException e) {
+                throw HyracksDataException.create(e);
+            }
+        }
+    }
+
+    private static void closeOutputStream(OutputStream fileOutputStream) throws HyracksDataException {
+        if (fileOutputStream != null) {
+            try {
+                fileOutputStream.close();
+            } catch (IOException e) {
+                throw HyracksDataException.create(e);
+            }
+        }
+    }
+
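+    // Downloads every blob under each directory's prefix; returns the files that failed to download.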
+    @Override
+    public Collection<FileReference> downloadDirectories(Collection<FileReference> directories)
+            throws HyracksDataException {
+        Set<FileReference> failedFiles = new HashSet<>();
+        for (FileReference directory : directories) {
+            PagedIterable<BlobItem> blobsInDir = getBlobItems(directory);
+            for (BlobItem blobItem : blobsInDir) {
+                profiler.objectGet();
+                download(blobItem, failedFiles);
+            }
+        }
+        return failedFiles;
+    }
+
+    private void download(BlobItem blobItem, Set<FileReference> failedFiles) throws HyracksDataException {
+        BlobClient blobClient = blobContainerClient.getBlobClient(blobItem.getName());
+        FileReference diskDestFile = ioManager.resolve(createDiskSubPath(blobItem.getName()));
+        Path absDiskBlobPath = getDiskDestPath(diskDestFile);
+        Path parentDiskPath = absDiskBlobPath.getParent();
+        createDirectories(parentDiskPath);
+        FileOutputStream outputStreamToDest = getOutputStreamToDest(diskDestFile);
+        try {
+            blobClient.downloadStream(outputStreamToDest);
+        } catch (Exception e) {
+            FileReference failedFile = ioManager.resolve(blobItem.getName());
+            failedFiles.add(failedFile);
+        } finally {
+            // Close the destination stream whether or not the download succeeded
+            closeOutputStream(outputStreamToDest);
+        }
+    }
+
+    private String createDiskSubPath(String blobName) {
+        // Strip everything before the "storage" sub-directory so the blob resolves relative to the
+        // local storage root; leave the name unchanged if the sub-directory is absent.
+        int storageIndex = blobName.indexOf(STORAGE_SUB_DIR);
+        if (storageIndex > 0) {
+            blobName = blobName.substring(storageIndex);
+        }
+        return blobName;
+    }
+
+    private FileOutputStream getOutputStreamToDest(FileReference destFile) throws HyracksDataException {
+        try {
+            return new FileOutputStream(destFile.getAbsolutePath());
+        } catch (FileNotFoundException ex) {
+            throw HyracksDataException.create(ex);
+        }
+    }
+
+    private void createDirectories(Path parentPath) throws HyracksDataException {
+        if (Files.notExists(parentPath)) {
+            try {
+                Files.createDirectories(parentPath);
+            } catch (IOException ex) {
+                throw HyracksDataException.create(ex);
+            }
+        }
+    }
+
+    private Path getDiskDestPath(FileReference destFile) throws HyracksDataException {
+        try {
+            return Path.of(destFile.getAbsolutePath());
+        } catch (InvalidPathException ex) {
+            throw HyracksDataException.create(ex);
+        }
+    }
+
+    private PagedIterable<BlobItem> getBlobItems(FileReference directoryToDownload) {
+        ListBlobsOptions listBlobsOptions =
+                new ListBlobsOptions().setPrefix(config.getPrefix() + directoryToDownload.getRelativePath());
+        return blobContainerClient.listBlobs(listBlobsOptions, null);
+    }
+
+    @Override
+    public void close() {
+        // Closing Azure Blob clients is not required; the SDK manages the underlying
+        // Netty connection pool for the application.
+        // Ref: https://github.com/Azure/azure-sdk-for-java/issues/17903
+        // Hence this implementation is a no-op.
+    }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzureRequestRateLimiter.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzureRequestRateLimiter.java
new file mode 100644
index 0000000..6a76952
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzureRequestRateLimiter.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.cloud.clients.azure.blobstorage;
+
+import org.apache.asterix.cloud.clients.profiler.limiter.IRateLimiter;
+import org.apache.asterix.cloud.clients.profiler.limiter.IRequestRateLimiter;
+import org.apache.asterix.cloud.clients.profiler.limiter.NoOpRateLimiter;
+import org.apache.asterix.cloud.clients.profiler.limiter.TokenBasedRateLimiter;
+
+public final class AzureRequestRateLimiter implements IRequestRateLimiter {
+    private final IRateLimiter writeLimiter;
+    private final IRateLimiter readLimiter;
+
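+    // Writes get a dedicated limiter; list requests share the read limiter.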
+    public AzureRequestRateLimiter(AzBlobStorageClientConfig config) {
+        long tokenAcquireTimeout = config.getTokenAcquireTimeout();
+        this.writeLimiter = createLimiter(config.getWriteMaxRequestsPerSeconds(), tokenAcquireTimeout);
+        this.readLimiter = createLimiter(config.getReadMaxRequestsPerSeconds(), tokenAcquireTimeout);
+    }
+
+    @Override
+    public void writeRequest() {
+        writeLimiter.acquire();
+    }
+
+    @Override
+    public void readRequest() {
+        readLimiter.acquire();
+    }
+
+    @Override
+    public void listRequest() {
+        readLimiter.acquire();
+    }
+
+    private static IRateLimiter createLimiter(int maxRequestsPerSecond, long tokenAcquireTimeout) {
+        if (maxRequestsPerSecond > 0) {
+            return new TokenBasedRateLimiter(maxRequestsPerSecond, tokenAcquireTimeout);
+        }
+        return NoOpRateLimiter.INSTANCE;
+    }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSClientConfig.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSClientConfig.java
index 4edb7a7..e4e471d 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSClientConfig.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSClientConfig.java
@@ -19,52 +19,68 @@
 package org.apache.asterix.cloud.clients.google.gcs;
 
 import static org.apache.asterix.external.util.google.gcs.GCSConstants.ENDPOINT_FIELD_NAME;
+import static org.apache.asterix.external.util.google.gcs.GCSConstants.STORAGE_PREFIX;
 
 import java.io.IOException;
 import java.util.Map;
 
 import org.apache.asterix.common.config.CloudProperties;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.util.StorageUtil;
 
 import com.google.auth.oauth2.GoogleCredentials;
 import com.google.auth.oauth2.OAuth2Credentials;
 import com.google.cloud.NoCredentials;
 
 public class GCSClientConfig {
-    public static final int WRITE_BUFFER_SIZE = StorageUtil.getIntSizeInBytes(1, StorageUtil.StorageUnit.MEGABYTE);
+
     // The maximum number of files that can be deleted (GCS restriction): https://cloud.google.com/storage/quotas#json-requests
     static final int DELETE_BATCH_SIZE = 100;
     private final String region;
     private final String endpoint;
-    private final String prefix;
     private final boolean anonymousAuth;
     private final long profilerLogInterval;
+    private final long tokenAcquireTimeout;
+    private final int readMaxRequestsPerSeconds;
+    private final int writeMaxRequestsPerSeconds;
+    private final int writeBufferSize;
+    private final String prefix;
 
-    public GCSClientConfig(String region, String endpoint, String prefix, boolean anonymousAuth,
-            long profilerLogInterval) {
+    private GCSClientConfig(String region, String endpoint, boolean anonymousAuth, long profilerLogInterval,
+            long tokenAcquireTimeout, int writeMaxRequestsPerSeconds, int readMaxRequestsPerSeconds,
+            int writeBufferSize, String prefix) {
         this.region = region;
         this.endpoint = endpoint;
-        this.prefix = prefix;
         this.anonymousAuth = anonymousAuth;
         this.profilerLogInterval = profilerLogInterval;
+        this.tokenAcquireTimeout = tokenAcquireTimeout;
+        this.writeMaxRequestsPerSeconds = writeMaxRequestsPerSeconds;
+        this.readMaxRequestsPerSeconds = readMaxRequestsPerSeconds;
+        this.writeBufferSize = writeBufferSize;
+        this.prefix = prefix;
+    }
+
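+    // Convenience constructor for unlimited request rates: a max of 0 requests/second disables rate limiting.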
+    public GCSClientConfig(String region, String endpoint, boolean anonymousAuth, long profilerLogInterval,
+            int writeBufferSize, String prefix) {
+        this(region, endpoint, anonymousAuth, profilerLogInterval, 1, 0, 0, writeBufferSize, prefix);
     }
 
     public static GCSClientConfig of(CloudProperties cloudProperties) {
         return new GCSClientConfig(cloudProperties.getStorageRegion(), cloudProperties.getStorageEndpoint(),
-                cloudProperties.getStoragePrefix(), cloudProperties.isStorageAnonymousAuth(),
-                cloudProperties.getProfilerLogInterval());
+                cloudProperties.isStorageAnonymousAuth(), cloudProperties.getProfilerLogInterval(),
+                cloudProperties.getTokenAcquireTimeout(), cloudProperties.getWriteMaxRequestsPerSecond(),
+                cloudProperties.getReadMaxRequestsPerSecond(), cloudProperties.getWriteBufferSize(),
+                cloudProperties.getStoragePrefix());
     }
 
-    public static GCSClientConfig of(Map<String, String> configuration) {
+    public static GCSClientConfig of(Map<String, String> configuration, int writeBufferSize) {
         String endPoint = configuration.getOrDefault(ENDPOINT_FIELD_NAME, "");
         long profilerLogInterval = 0;
 
         String region = "";
-        String prefix = "";
+        String prefix = configuration.getOrDefault(STORAGE_PREFIX, "");
         boolean anonymousAuth = false;
 
-        return new GCSClientConfig(region, endPoint, prefix, anonymousAuth, profilerLogInterval);
+        return new GCSClientConfig(region, endPoint, anonymousAuth, profilerLogInterval, writeBufferSize, prefix);
     }
 
     public String getRegion() {
@@ -75,10 +91,6 @@
         return endpoint;
     }
 
-    public String getPrefix() {
-        return prefix;
-    }
-
     public long getProfilerLogInterval() {
         return profilerLogInterval;
     }
@@ -94,4 +106,24 @@
             throw HyracksDataException.create(e);
         }
     }
+
+    public long getTokenAcquireTimeout() {
+        return tokenAcquireTimeout;
+    }
+
+    public int getWriteMaxRequestsPerSeconds() {
+        return writeMaxRequestsPerSeconds;
+    }
+
+    public int getReadMaxRequestsPerSeconds() {
+        return readMaxRequestsPerSeconds;
+    }
+
+    public int getWriteBufferSize() {
+        return writeBufferSize;
+    }
+
+    public String getPrefix() {
+        return prefix;
+    }
 }
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSCloudClient.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSCloudClient.java
index de242bd..62ca4ec 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSCloudClient.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSCloudClient.java
@@ -40,10 +40,10 @@
 import org.apache.asterix.cloud.clients.IParallelDownloader;
 import org.apache.asterix.cloud.clients.profiler.CountRequestProfilerLimiter;
 import org.apache.asterix.cloud.clients.profiler.IRequestProfilerLimiter;
-import org.apache.asterix.cloud.clients.profiler.NoOpRequestProfilerLimiter;
-import org.apache.asterix.cloud.clients.profiler.limiter.NoOpRequestLimiter;
+import org.apache.asterix.cloud.clients.profiler.RequestLimiterNoOpProfiler;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
+import org.apache.hyracks.api.util.CleanupUtils;
 import org.apache.hyracks.api.util.IoUtil;
 import org.apache.hyracks.control.nc.io.IOManager;
 
@@ -68,16 +68,19 @@
     private final GCSClientConfig config;
     private final ICloudGuardian guardian;
     private final IRequestProfilerLimiter profilerLimiter;
+    private final int writeBufferSize;
 
     public GCSCloudClient(GCSClientConfig config, Storage gcsClient, ICloudGuardian guardian) {
         this.gcsClient = gcsClient;
         this.config = config;
         this.guardian = guardian;
+        this.writeBufferSize = config.getWriteBufferSize();
         long profilerInterval = config.getProfilerLogInterval();
+        GCSRequestRateLimiter limiter = new GCSRequestRateLimiter(config);
         if (profilerInterval > 0) {
-            profilerLimiter = new CountRequestProfilerLimiter(profilerInterval, NoOpRequestLimiter.INSTANCE);
+            profilerLimiter = new CountRequestProfilerLimiter(profilerInterval, limiter);
         } else {
-            profilerLimiter = NoOpRequestProfilerLimiter.INSTANCE;
+            profilerLimiter = new RequestLimiterNoOpProfiler(limiter);
         }
         guardian.setCloudClient(this);
     }
@@ -88,7 +91,7 @@
 
     @Override
     public int getWriteBufferSize() {
-        return GCSClientConfig.WRITE_BUFFER_SIZE;
+        return writeBufferSize;
     }
 
     @Override
@@ -98,20 +101,20 @@
 
     @Override
     public ICloudWriter createWriter(String bucket, String path, IWriteBufferProvider bufferProvider) {
-        return new GCSWriter(bucket, path, gcsClient, profilerLimiter);
+        return new GCSWriter(bucket, config.getPrefix() + path, gcsClient, profilerLimiter, guardian, writeBufferSize);
     }
 
     @Override
     public Set<CloudFile> listObjects(String bucket, String path, FilenameFilter filter) {
         guardian.checkReadAccess(bucket, path);
         profilerLimiter.objectsList();
-        Page<Blob> blobs =
-                gcsClient.list(bucket, BlobListOption.prefix(path), BlobListOption.fields(Storage.BlobField.SIZE));
+        Page<Blob> blobs = gcsClient.list(bucket, BlobListOption.prefix(config.getPrefix() + path),
+                BlobListOption.fields(Storage.BlobField.SIZE));
 
         Set<CloudFile> files = new HashSet<>();
         for (Blob blob : blobs.iterateAll()) {
             if (filter.accept(null, IoUtil.getFileNameFromPath(blob.getName()))) {
-                files.add(CloudFile.of(blob.getName(), blob.getSize()));
+                files.add(CloudFile.of(stripCloudPrefix(blob.getName()), blob.getSize()));
             }
         }
         return files;
@@ -119,8 +122,9 @@
 
     @Override
     public int read(String bucket, String path, long offset, ByteBuffer buffer) throws HyracksDataException {
+        guardian.checkReadAccess(bucket, path);
         profilerLimiter.objectGet();
-        BlobId blobId = BlobId.of(bucket, path);
+        BlobId blobId = BlobId.of(bucket, config.getPrefix() + path);
         long readTo = offset + buffer.remaining();
         int totalRead = 0;
         try (ReadChannel from = gcsClient.reader(blobId).limit(readTo)) {
@@ -140,8 +144,9 @@
 
     @Override
     public byte[] readAllBytes(String bucket, String path) {
+        guardian.checkReadAccess(bucket, path);
         profilerLimiter.objectGet();
-        BlobId blobId = BlobId.of(bucket, path);
+        BlobId blobId = BlobId.of(bucket, config.getPrefix() + path);
         try {
             return gcsClient.readAllBytes(blobId);
         } catch (StorageException e) {
@@ -151,12 +156,15 @@
 
     @Override
     public InputStream getObjectStream(String bucket, String path, long offset, long length) {
+        guardian.checkReadAccess(bucket, path);
         profilerLimiter.objectGet();
-        try (ReadChannel reader = gcsClient.reader(bucket, path).limit(offset + length)) {
+        ReadChannel reader = null;
+        try {
+            reader = gcsClient.reader(bucket, config.getPrefix() + path).limit(offset + length);
             reader.seek(offset);
             return Channels.newInputStream(reader);
-        } catch (StorageException | IOException e) {
-            throw new IllegalStateException(e);
+        } catch (StorageException | IOException ex) {
+            throw new RuntimeException(CleanupUtils.close(reader, ex));
         }
     }
 
@@ -164,14 +172,15 @@
     public void write(String bucket, String path, byte[] data) {
         guardian.checkWriteAccess(bucket, path);
         profilerLimiter.objectWrite();
-        BlobInfo blobInfo = BlobInfo.newBuilder(bucket, path).build();
+        BlobInfo blobInfo = BlobInfo.newBuilder(bucket, config.getPrefix() + path).build();
         gcsClient.create(blobInfo, data);
     }
 
     @Override
     public void copy(String bucket, String srcPath, FileReference destPath) {
-        Page<Blob> blobs = gcsClient.list(bucket, BlobListOption.prefix(srcPath));
+        guardian.checkReadAccess(bucket, srcPath);
         profilerLimiter.objectsList();
+        Page<Blob> blobs = gcsClient.list(bucket, BlobListOption.prefix(config.getPrefix() + srcPath));
         for (Blob blob : blobs.iterateAll()) {
             profilerLimiter.objectCopy();
             BlobId source = blob.getBlobId();
@@ -194,7 +203,7 @@
         while (pathIter.hasNext()) {
             batchRequest = gcsClient.batch();
             for (int i = 0; pathIter.hasNext() && i < DELETE_BATCH_SIZE; i++) {
-                BlobId blobId = BlobId.of(bucket, pathIter.next());
+                BlobId blobId = BlobId.of(bucket, config.getPrefix() + pathIter.next());
                 guardian.checkWriteAccess(bucket, blobId.getName());
                 batchRequest.delete(blobId);
             }
@@ -208,7 +217,8 @@
     public long getObjectSize(String bucket, String path) {
         guardian.checkReadAccess(bucket, path);
         profilerLimiter.objectGet();
-        Blob blob = gcsClient.get(bucket, path, Storage.BlobGetOption.fields(Storage.BlobField.SIZE));
+        Blob blob =
+                gcsClient.get(bucket, config.getPrefix() + path, Storage.BlobGetOption.fields(Storage.BlobField.SIZE));
         if (blob == null) {
             return 0;
         }
@@ -219,7 +229,8 @@
     public boolean exists(String bucket, String path) {
         guardian.checkReadAccess(bucket, path);
         profilerLimiter.objectGet();
-        Blob blob = gcsClient.get(bucket, path, Storage.BlobGetOption.fields(Storage.BlobField.values()));
+        Blob blob = gcsClient.get(bucket, config.getPrefix() + path,
+                Storage.BlobGetOption.fields(Storage.BlobField.values()));
         return blob != null && blob.exists();
     }
 
@@ -227,7 +238,7 @@
     public boolean isEmptyPrefix(String bucket, String path) {
         guardian.checkReadAccess(bucket, path);
         profilerLimiter.objectsList();
-        Page<Blob> blobs = gcsClient.list(bucket, BlobListOption.prefix(path));
+        Page<Blob> blobs = gcsClient.list(bucket, BlobListOption.prefix(config.getPrefix() + path));
         return !blobs.hasNextPage();
     }
 
@@ -272,4 +283,8 @@
         }
         return builder.build().getService();
     }
-}
+
+    private String stripCloudPrefix(String objectName) {
+        return objectName.substring(config.getPrefix().length());
+    }
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSParallelDownloader.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSParallelDownloader.java
index 0994cea..0d30120 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSParallelDownloader.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSParallelDownloader.java
@@ -56,6 +56,7 @@
     private final Storage gcsClient;
     private final TransferManager transferManager;
     private final IRequestProfilerLimiter profiler;
+    private final GCSClientConfig config;
 
     public GCSParallelDownloader(String bucket, IOManager ioManager, GCSClientConfig config,
             IRequestProfilerLimiter profiler) throws HyracksDataException {
@@ -70,18 +71,21 @@
         this.gcsClient = builder.build().getService();
         this.transferManager =
                 TransferManagerConfig.newBuilder().setStorageOptions(builder.build()).build().getService();
+        this.config = config;
     }
 
     @Override
     public void downloadFiles(Collection<FileReference> toDownload) throws HyracksDataException {
-        ParallelDownloadConfig.Builder config = ParallelDownloadConfig.newBuilder().setBucketName(bucket);
+        ParallelDownloadConfig.Builder downConfig =
+                ParallelDownloadConfig.newBuilder().setBucketName(bucket).setStripPrefix(this.config.getPrefix());
+
         Map<Path, List<BlobInfo>> pathListMap = new HashMap<>();
         try {
             for (FileReference fileReference : toDownload) {
                 profiler.objectGet();
                 FileUtils.createParentDirectories(fileReference.getFile());
-                addToMap(pathListMap, fileReference.getDeviceHandle().getMount().toPath(),
-                        BlobInfo.newBuilder(BlobId.of(bucket, fileReference.getRelativePath())).build());
+                addToMap(pathListMap, fileReference.getDeviceHandle().getMount().toPath(), BlobInfo
+                        .newBuilder(BlobId.of(bucket, config.getPrefix() + fileReference.getRelativePath())).build());
             }
         } catch (IOException e) {
             throw HyracksDataException.create(e);
@@ -89,7 +93,7 @@
         List<DownloadJob> downloadJobs = new ArrayList<>(pathListMap.size());
         for (Map.Entry<Path, List<BlobInfo>> entry : pathListMap.entrySet()) {
             downloadJobs.add(transferManager.downloadBlobs(entry.getValue(),
-                    config.setDownloadDirectory(entry.getKey()).build()));
+                    downConfig.setDownloadDirectory(entry.getKey()).build()));
         }
         downloadJobs.forEach(DownloadJob::getDownloadResults);
     }
@@ -98,20 +102,22 @@
     public Collection<FileReference> downloadDirectories(Collection<FileReference> toDownload)
             throws HyracksDataException {
         Set<FileReference> failedFiles = new HashSet<>();
-        ParallelDownloadConfig.Builder config = ParallelDownloadConfig.newBuilder().setBucketName(bucket);
+        ParallelDownloadConfig.Builder downConfig =
+                ParallelDownloadConfig.newBuilder().setBucketName(bucket).setStripPrefix(this.config.getPrefix());
 
         Map<Path, List<BlobInfo>> pathListMap = new HashMap<>();
         for (FileReference fileReference : toDownload) {
             profiler.objectMultipartDownload();
-            Page<Blob> blobs = gcsClient.list(bucket, Storage.BlobListOption.prefix(fileReference.getRelativePath()));
+            Page<Blob> blobs = gcsClient.list(bucket,
+                    Storage.BlobListOption.prefix(this.config.getPrefix() + fileReference.getRelativePath()));
             for (Blob blob : blobs.iterateAll()) {
                 addToMap(pathListMap, fileReference.getDeviceHandle().getMount().toPath(), blob.asBlobInfo());
             }
         }
         List<DownloadJob> downloadJobs = new ArrayList<>(pathListMap.size());
         for (Map.Entry<Path, List<BlobInfo>> entry : pathListMap.entrySet()) {
-            downloadJobs.add(transferManager.downloadBlobs(entry.getValue(),
-                    config.setDownloadDirectory(entry.getKey()).build()));
+            ParallelDownloadConfig parallelDownloadConfig = downConfig.setDownloadDirectory(entry.getKey()).build();
+            downloadJobs.add(transferManager.downloadBlobs(entry.getValue(), parallelDownloadConfig));
         }
         List<DownloadResult> results;
         for (DownloadJob job : downloadJobs) {
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSRequestRateLimiter.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSRequestRateLimiter.java
new file mode 100644
index 0000000..71f6b8c
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSRequestRateLimiter.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.cloud.clients.google.gcs;
+
+import org.apache.asterix.cloud.clients.profiler.limiter.IRateLimiter;
+import org.apache.asterix.cloud.clients.profiler.limiter.IRequestRateLimiter;
+import org.apache.asterix.cloud.clients.profiler.limiter.NoOpRateLimiter;
+import org.apache.asterix.cloud.clients.profiler.limiter.TokenBasedRateLimiter;
+
+public class GCSRequestRateLimiter implements IRequestRateLimiter {
+    private final IRateLimiter writeLimiter;
+    private final IRateLimiter readLimiter;
+
+    public GCSRequestRateLimiter(GCSClientConfig config) {
+        long tokenAcquireTimeout = config.getTokenAcquireTimeout();
+        this.writeLimiter = createLimiter(config.getWriteMaxRequestsPerSeconds(), tokenAcquireTimeout);
+        this.readLimiter = createLimiter(config.getReadMaxRequestsPerSeconds(), tokenAcquireTimeout);
+    }
+
+    @Override
+    public void writeRequest() {
+        writeLimiter.acquire();
+    }
+
+    @Override
+    public void readRequest() {
+        readLimiter.acquire();
+    }
+
+    @Override
+    public void listRequest() {
+        readLimiter.acquire();
+    }
+
+    private static IRateLimiter createLimiter(int maxRequestsPerSecond, long tokenAcquireTimeout) {
+        if (maxRequestsPerSecond > 0) {
+            return new TokenBasedRateLimiter(maxRequestsPerSecond, tokenAcquireTimeout);
+        }
+        return NoOpRateLimiter.INSTANCE;
+    }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSWriter.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSWriter.java
index 41d1a71..8d68f01 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSWriter.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSWriter.java
@@ -18,11 +18,10 @@
  */
 package org.apache.asterix.cloud.clients.google.gcs;
 
-import static org.apache.asterix.cloud.clients.google.gcs.GCSClientConfig.WRITE_BUFFER_SIZE;
-
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
+import org.apache.asterix.cloud.clients.ICloudGuardian;
 import org.apache.asterix.cloud.clients.ICloudWriter;
 import org.apache.asterix.cloud.clients.profiler.IRequestProfilerLimiter;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -40,14 +39,20 @@
     private final String path;
     private final IRequestProfilerLimiter profiler;
     private final Storage gcsClient;
+    private final ICloudGuardian guardian;
+    private final int writeBufferSize;
+
     private WriteChannel writer = null;
     private long writtenBytes;
 
-    public GCSWriter(String bucket, String path, Storage gcsClient, IRequestProfilerLimiter profiler) {
+    public GCSWriter(String bucket, String path, Storage gcsClient, IRequestProfilerLimiter profiler,
+            ICloudGuardian guardian, int writeBufferSize) {
         this.bucket = bucket;
         this.path = path;
         this.profiler = profiler;
         this.gcsClient = gcsClient;
+        this.guardian = guardian;
+        this.writeBufferSize = writeBufferSize;
         writtenBytes = 0;
     }
 
@@ -58,6 +63,7 @@
 
     @Override
     public int write(ByteBuffer page) throws HyracksDataException {
+        guardian.checkIsolatedWriteAccess(bucket, path);
         profiler.objectMultipartUpload();
         setUploadId();
         int written = 0;
@@ -93,6 +99,7 @@
 
     @Override
     public void finish() throws HyracksDataException {
+        guardian.checkWriteAccess(bucket, path);
         setUploadId();
         profiler.objectMultipartUpload();
         try {
@@ -115,7 +122,7 @@
     private void setUploadId() {
         if (writer == null) {
             writer = gcsClient.writer(BlobInfo.newBuilder(BlobId.of(bucket, path)).build());
-            writer.setChunkSize(WRITE_BUFFER_SIZE);
+            writer.setChunkSize(writeBufferSize);
             writtenBytes = 0;
             log("STARTED");
         }
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/lazy/filesystem/HolePuncherProvider.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/lazy/filesystem/HolePuncherProvider.java
index 91de5ae..e50559f 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/lazy/filesystem/HolePuncherProvider.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/lazy/filesystem/HolePuncherProvider.java
@@ -52,7 +52,7 @@
 
         // Running a debug hole puncher on a non-Linux box
         String osName = FileSystemOperationDispatcherUtil.getOSName();
-        LOGGER.warn("Using 'DebugHolePuncher' as the OS '{}' does not support punishing holes", osName);
+        LOGGER.warn("Using 'DebugHolePuncher' as the OS '{}' does not support punching holes", osName);
         return new DebugHolePuncher(cloudIOManager, bufferProvider);
     }
 
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/parquet/ParquetExternalWriterFactory.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/parquet/ParquetExternalWriterFactory.java
new file mode 100644
index 0000000..d754068
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/parquet/ParquetExternalWriterFactory.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.cloud.parquet;
+
+import static org.apache.asterix.external.writer.printer.parquet.ParquetSchemaLazyVisitor.generateSchema;
+
+import java.io.Serializable;
+
+import org.apache.asterix.external.writer.printer.ParquetExternalFilePrinterFactory;
+import org.apache.asterix.external.writer.printer.parquet.ParquetSchemaTree;
+import org.apache.asterix.runtime.writer.ExternalFileWriter;
+import org.apache.asterix.runtime.writer.IExternalFileWriter;
+import org.apache.asterix.runtime.writer.IExternalFileWriterFactory;
+import org.apache.asterix.runtime.writer.IPathResolver;
+import org.apache.hyracks.algebricks.runtime.writers.IExternalWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.parquet.schema.MessageType;
+
+public class ParquetExternalWriterFactory implements Serializable {
+
+    private static final long serialVersionUID = 8971234908711236L;
+    private final IExternalFileWriterFactory writerFactory;
+    private final int maxResult;
+    private final ParquetExternalFilePrinterFactory printerFactory;
+    private final IPathResolver resolver;
+    private final IHyracksTaskContext ctx;
+
+    public ParquetExternalWriterFactory(IHyracksTaskContext ctx, IExternalFileWriterFactory writerFactory,
+            int maxResult, ParquetExternalFilePrinterFactory printerFactory, IPathResolver resolver) {
+        this.ctx = ctx;
+        this.writerFactory = writerFactory;
+        this.maxResult = maxResult;
+        this.printerFactory = printerFactory;
+        this.resolver = resolver;
+    }
+
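+    // Generates the Parquet MessageType for the inferred schema and hands it to the printer factory before building the writer.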
+    public IExternalWriter createWriter(ParquetSchemaTree.SchemaNode schemaNode) throws HyracksDataException {
+        MessageType schema = generateSchema(schemaNode);
+        printerFactory.setParquetSchemaString(schema.toString());
+        IExternalFileWriter writer = writerFactory.createWriter(ctx, printerFactory);
+        return new ExternalFileWriter(resolver, writer, maxResult);
+    }
+
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/parquet/ParquetSchemaInferPoolWriter.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/parquet/ParquetSchemaInferPoolWriter.java
new file mode 100644
index 0000000..b500cbe
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/parquet/ParquetSchemaInferPoolWriter.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.cloud.parquet;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.asterix.external.writer.printer.parquet.ISchemaChecker;
+import org.apache.asterix.external.writer.printer.parquet.ParquetSchemaLazyVisitor;
+import org.apache.asterix.external.writer.printer.parquet.ParquetSchemaTree;
+import org.apache.hyracks.algebricks.runtime.writers.IExternalWriter;
+import org.apache.hyracks.api.exceptions.ErrorCode;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class ParquetSchemaInferPoolWriter {
+    private final ParquetExternalWriterFactory writerFactory;
+
+    private final List<ParquetSchemaTree.SchemaNode> schemaNodes;
+    private final List<IExternalWriter> writerList;
+    private final int maxSchemas;
+    private final ISchemaChecker schemaChecker;
+    private final ParquetSchemaLazyVisitor schemaLazyVisitor;
+
+    public ParquetSchemaInferPoolWriter(ParquetExternalWriterFactory writerFactory, ISchemaChecker schemaChecker,
+            ParquetSchemaLazyVisitor parquetSchemaLazyVisitor, int maxSchemas) {
+        this.writerFactory = writerFactory;
+        this.schemaChecker = schemaChecker;
+        this.schemaLazyVisitor = parquetSchemaLazyVisitor;
+        this.maxSchemas = maxSchemas;
+        this.schemaNodes = new ArrayList<>();
+        this.writerList = new ArrayList<>();
+    }
+
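+    // Compares the value against each known schema: EQUIVALENT means it is already covered, GROWING widens the
+    // stored schema and recycles its writer; otherwise a new schema is registered, bounded by maxSchemas.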
+    public void inferSchema(IValueReference value) throws HyracksDataException {
+        for (int i = 0; i < schemaNodes.size(); i++) {
+            ISchemaChecker.SchemaComparisonType schemaComparisonType =
+                    schemaChecker.checkSchema(schemaNodes.get(i), value);
+
+            if (schemaComparisonType.equals(ISchemaChecker.SchemaComparisonType.EQUIVALENT)) {
+                return;
+            } else if (schemaComparisonType.equals(ISchemaChecker.SchemaComparisonType.GROWING)) {
+                schemaNodes.set(i, schemaLazyVisitor.inferSchema(value));
+                closeWriter(i);
+                return;
+            }
+        }
+
+        if (schemaNodes.size() == maxSchemas) {
+            throw new HyracksDataException(ErrorCode.SCHEMA_LIMIT_EXCEEDED, maxSchemas);
+        }
+        schemaNodes.add(schemaLazyVisitor.inferSchema(value));
+        writerList.add(null);
+    }
+
+    public void initNewPartition(IFrameTupleReference tuple) throws HyracksDataException {
+        for (int i = 0; i < writerList.size(); i++) {
+            createOrInitPartition(i, tuple);
+        }
+    }
+
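+    // Assumes inferSchema(value) ran first, so every value finds an EQUIVALENT schema to write to.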
+    public void write(IValueReference value) throws HyracksDataException {
+        for (int i = 0; i < schemaNodes.size(); i++) {
+            if (schemaChecker.checkSchema(schemaNodes.get(i), value)
+                    .equals(ISchemaChecker.SchemaComparisonType.EQUIVALENT)) {
+                createOrWrite(i, value);
+                return;
+            }
+        }
+    }
+
+    public void close() throws HyracksDataException {
+        for (int i = 0; i < writerList.size(); i++) {
+            closeWriter(i);
+        }
+    }
+
+    private void createOrInitPartition(int index, IFrameTupleReference tupleReference) throws HyracksDataException {
+        if (writerList.get(index) == null) {
+            createWriter(index);
+        }
+        writerList.get(index).initNewPartition(tupleReference);
+    }
+
+    private void createOrWrite(int index, IValueReference value) throws HyracksDataException {
+        if (writerList.get(index) == null) {
+            createWriter(index);
+        }
+        writerList.get(index).write(value);
+    }
+
+    private void createWriter(int index) throws HyracksDataException {
+        writerList.set(index, writerFactory.createWriter(schemaNodes.get(index)));
+        writerList.get(index).open();
+    }
+
+    private void closeWriter(int index) throws HyracksDataException {
+        if (writerList.get(index) != null) {
+            writerList.get(index).close();
+            writerList.set(index, null);
+        }
+    }
+
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/parquet/ParquetSinkExternalWriterFactory.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/parquet/ParquetSinkExternalWriterFactory.java
new file mode 100644
index 0000000..3a276a2
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/parquet/ParquetSinkExternalWriterFactory.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.cloud.parquet;
+
+import org.apache.asterix.external.writer.printer.ParquetExternalFilePrinterFactory;
+import org.apache.asterix.external.writer.printer.ParquetExternalFilePrinterFactoryProvider;
+import org.apache.asterix.om.types.IAType;
+import org.apache.asterix.runtime.writer.IExternalFileWriterFactory;
+import org.apache.asterix.runtime.writer.IPathResolverFactory;
+import org.apache.hyracks.algebricks.runtime.base.IPushRuntime;
+import org.apache.hyracks.algebricks.runtime.operators.base.AbstractPushRuntimeFactory;
+import org.apache.hyracks.algebricks.runtime.operators.writer.WriterPartitionerFactory;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class ParquetSinkExternalWriterFactory extends AbstractPushRuntimeFactory {
+    private static final long serialVersionUID = -1285997525553685225L;
+    private final WriterPartitionerFactory partitionerFactory;
+    private final RecordDescriptor inputRecordDesc;
+    private final int sourceColumn;
+    private final int maxSchemas;
+    private final IAType sourceType;
+    private final IExternalFileWriterFactory writerFactory;
+    private final int maxResult;
+    private final ParquetExternalFilePrinterFactoryProvider printerFactoryProvider;
+    private final IPathResolverFactory pathResolverFactory;
+
+    public ParquetSinkExternalWriterFactory(WriterPartitionerFactory partitionerFactory,
+            RecordDescriptor inputRecordDesc, int sourceColumn, IAType sourceType, int maxSchemas,
+            IExternalFileWriterFactory writerFactory, int maxResult,
+            ParquetExternalFilePrinterFactoryProvider printerFactoryProvider,
+            IPathResolverFactory pathResolverFactory) {
+        this.partitionerFactory = partitionerFactory;
+        this.inputRecordDesc = inputRecordDesc;
+        this.sourceColumn = sourceColumn;
+        this.sourceType = sourceType;
+        this.maxSchemas = maxSchemas;
+        this.writerFactory = writerFactory;
+        this.maxResult = maxResult;
+        this.printerFactoryProvider = printerFactoryProvider;
+        this.pathResolverFactory = pathResolverFactory;
+    }
+
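+    // Wires the printer factory and path resolver into a schema-inferring writer sink for this task.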
+    @Override
+    public IPushRuntime[] createPushRuntime(IHyracksTaskContext ctx) throws HyracksDataException {
+        ParquetExternalFilePrinterFactory printerFactory =
+                (ParquetExternalFilePrinterFactory) printerFactoryProvider.createPrinterFactory();
+        ParquetExternalWriterFactory parquetExternalWriterFactory = new ParquetExternalWriterFactory(ctx, writerFactory,
+                maxResult, printerFactory, pathResolverFactory.createResolver(ctx));
+        ParquetSinkExternalWriterRuntime runtime =
+                new ParquetSinkExternalWriterRuntime(sourceColumn, partitionerFactory.createPartitioner(),
+                        inputRecordDesc, parquetExternalWriterFactory, sourceType, maxSchemas);
+        return new IPushRuntime[] { runtime };
+    }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/parquet/ParquetSinkExternalWriterRuntime.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/parquet/ParquetSinkExternalWriterRuntime.java
new file mode 100644
index 0000000..3dbd4d3
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/parquet/ParquetSinkExternalWriterRuntime.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.cloud.parquet;
+
+import java.nio.ByteBuffer;
+
+import org.apache.asterix.external.writer.printer.parquet.ParquetSchemaLazyVisitor;
+import org.apache.asterix.external.writer.printer.parquet.SchemaCheckerLazyVisitor;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputSinkPushRuntime;
+import org.apache.hyracks.algebricks.runtime.operators.writer.IWriterPartitioner;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IPointable;
+import org.apache.hyracks.data.std.primitive.VoidPointable;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class ParquetSinkExternalWriterRuntime extends AbstractOneInputSinkPushRuntime {
+    private final int sourceColumn;
+    private final IWriterPartitioner partitioner;
+    private final IPointable sourceValue;
+    private final ParquetExternalWriterFactory writerFactory;
+    private FrameTupleAccessor tupleAccessor;
+    private FrameTupleReference tupleRef;
+    private IFrameWriter frameWriter;
+    private final int maxSchemas;
+    private final IAType sourceType;
+    private ParquetSchemaInferPoolWriter poolWriter;
+
+    public ParquetSinkExternalWriterRuntime(int sourceColumn, IWriterPartitioner partitioner,
+            RecordDescriptor inputRecordDesc, ParquetExternalWriterFactory writerFactory, IAType sourceType,
+            int maxSchemas) {
+        this.sourceColumn = sourceColumn;
+        this.partitioner = partitioner;
+        this.sourceValue = new VoidPointable();
+        this.inputRecordDesc = inputRecordDesc;
+        this.writerFactory = writerFactory;
+        this.sourceType = sourceType;
+        this.maxSchemas = maxSchemas;
+    }
+
+    @Override
+    public void setOutputFrameWriter(int index, IFrameWriter frameWriter, RecordDescriptor recordDesc) {
+        this.frameWriter = frameWriter;
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        if (tupleAccessor == null) {
+            tupleAccessor = new FrameTupleAccessor(inputRecordDesc);
+            tupleRef = new FrameTupleReference();
+        }
+
+        poolWriter = new ParquetSchemaInferPoolWriter(writerFactory, new SchemaCheckerLazyVisitor(sourceType),
+                new ParquetSchemaLazyVisitor(sourceType), maxSchemas);
+        this.frameWriter.open();
+
+    }
+
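+    // Two passes per frame: first infer/merge schemas for all tuples, then write, so each schema is final before use.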
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        tupleAccessor.reset(buffer);
+
+        for (int i = 0; i < tupleAccessor.getTupleCount(); i++) {
+            tupleRef.reset(tupleAccessor, i);
+            setValue(tupleRef, sourceColumn, sourceValue);
+            poolWriter.inferSchema(sourceValue);
+        }
+
+        for (int i = 0; i < tupleAccessor.getTupleCount(); i++) {
+            tupleRef.reset(tupleAccessor, i);
+            setValue(tupleRef, sourceColumn, sourceValue);
+            if (partitioner.isNewPartition(tupleAccessor, i)) {
+                poolWriter.initNewPartition(tupleRef);
+            }
+            poolWriter.write(sourceValue);
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        frameWriter.fail();
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        poolWriter.close();
+        frameWriter.close();
+    }
+
+    private void setValue(IFrameTupleReference tuple, int column, IPointable value) {
+        byte[] data = tuple.getFieldData(column);
+        int start = tuple.getFieldStart(column);
+        int length = tuple.getFieldLength(column);
+        value.set(data, start, length);
+    }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/writer/GCSExternalFileWriterFactory.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/writer/GCSExternalFileWriterFactory.java
index 9e9c003..886f20d 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/writer/GCSExternalFileWriterFactory.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/writer/GCSExternalFileWriterFactory.java
@@ -62,7 +62,7 @@
 
     @Override
     ICloudClient createCloudClient() throws CompilationException {
-        GCSClientConfig config = GCSClientConfig.of(configuration);
+        GCSClientConfig config = GCSClientConfig.of(configuration, writeBufferSize);
         return new GCSCloudClient(config, GCSUtils.buildClient(configuration),
                 ICloudGuardian.NoOpCloudGuardian.INSTANCE);
     }
diff --git a/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/azure/LSMAzBlobStorageTest.java b/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/azure/LSMAzBlobStorageTest.java
new file mode 100644
index 0000000..1f49fd9
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/azure/LSMAzBlobStorageTest.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.cloud.azure;
+
+import org.apache.asterix.cloud.AbstractLSMTest;
+import org.apache.asterix.cloud.clients.ICloudGuardian;
+import org.apache.asterix.cloud.clients.azure.blobstorage.AzBlobStorageClientConfig;
+import org.apache.asterix.cloud.clients.azure.blobstorage.AzBlobStorageCloudClient;
+import org.apache.hyracks.util.StorageUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import com.azure.core.http.rest.PagedIterable;
+import com.azure.storage.blob.BlobClient;
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.BlobServiceClient;
+import com.azure.storage.blob.BlobServiceClientBuilder;
+import com.azure.storage.blob.models.BlobItem;
+import com.azure.storage.blob.models.ListBlobsOptions;
+import com.azure.storage.common.StorageSharedKeyCredential;
+
+public class LSMAzBlobStorageTest extends AbstractLSMTest {
+    private static BlobContainerClient client;
+
+    private static BlobServiceClient blobServiceClient;
+    private static final int MOCK_SERVER_PORT = 15055;
+    private static final String MOCK_SERVER_HOSTNAME = "http://127.0.0.1:" + MOCK_SERVER_PORT;
+    private static final String MOCK_SERVER_REGION = "us-west-2";
+
+    @BeforeClass
+    public static void setup() throws Exception {
+        LOGGER.info("LSMAzBlobStorageTest setup");
+
+        String endpointString = MOCK_SERVER_HOSTNAME + "/devstoreaccount1/" + PLAYGROUND_CONTAINER;
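+        // Azurite's well-known development-storage credentials; these are public defaults, not secrets.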
+        final String accKey =
+                "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==";
+        final String accName = "devstoreaccount1";
+
+        blobServiceClient = new BlobServiceClientBuilder().endpoint(endpointString)
+                .credential(new StorageSharedKeyCredential(accName, accKey)).buildClient();
+
+        // Start the test clean by deleting any residual data from previous tests
+        blobServiceClient.deleteBlobContainerIfExists(PLAYGROUND_CONTAINER);
+        client = blobServiceClient.createBlobContainerIfNotExists(PLAYGROUND_CONTAINER);
+
+        LOGGER.info("Az Blob Client created successfully");
+        int writeBufferSize = StorageUtil.getIntSizeInBytes(5, StorageUtil.StorageUnit.MEGABYTE);
+        AzBlobStorageClientConfig config = new AzBlobStorageClientConfig(MOCK_SERVER_REGION, MOCK_SERVER_HOSTNAME, "",
+                true, 0, PLAYGROUND_CONTAINER, 1, 0, 0, writeBufferSize);
+        CLOUD_CLIENT = new AzBlobStorageCloudClient(config, ICloudGuardian.NoOpCloudGuardian.INSTANCE);
+    }
+
+    private static void cleanup() {
+        try {
+            PagedIterable<BlobItem> blobItems = client.listBlobs(new ListBlobsOptions().setPrefix(""), null);
+            // Delete all the contents of the container
+            for (BlobItem blobItem : blobItems) {
+                BlobClient blobClient = client.getBlobClient(blobItem.getName());
+                blobClient.delete();
+            }
+            // Delete the container
+            blobServiceClient.deleteBlobContainer(PLAYGROUND_CONTAINER);
+        } catch (Exception ex) {
+            // best-effort cleanup; ignore failures (the container may already be gone)
+        }
+    }
+
+    @AfterClass
+    public static void tearDown() throws Exception {
+        LOGGER.info("Shutdown Azurite");
+        // Azure clients do not need explicit closure.
+        cleanup();
+    }
+}
diff --git a/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/gcs/LSMGCSTest.java b/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/gcs/LSMGCSTest.java
index 3c62cce..08864ac 100644
--- a/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/gcs/LSMGCSTest.java
+++ b/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/gcs/LSMGCSTest.java
@@ -22,6 +22,7 @@
 import org.apache.asterix.cloud.clients.ICloudGuardian;
 import org.apache.asterix.cloud.clients.google.gcs.GCSClientConfig;
 import org.apache.asterix.cloud.clients.google.gcs.GCSCloudClient;
+import org.apache.hyracks.util.StorageUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
@@ -48,7 +49,10 @@
         client.create(BucketInfo.newBuilder(PLAYGROUND_CONTAINER).setStorageClass(StorageClass.STANDARD)
                 .setLocation(MOCK_SERVER_REGION).build());
         LOGGER.info("Client created successfully");
-        GCSClientConfig config = new GCSClientConfig(MOCK_SERVER_REGION, MOCK_SERVER_HOSTNAME, "", true, 0);
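+        // Same 5MB write buffer floor as COMPILER_COPY_TO_WRITE_BUFFER_SIZE (see CompilerProperties).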
+        int writeBufferSize = StorageUtil.getIntSizeInBytes(5, StorageUtil.StorageUnit.MEGABYTE);
+        GCSClientConfig config =
+                new GCSClientConfig(MOCK_SERVER_REGION, MOCK_SERVER_HOSTNAME, true, 0, writeBufferSize, "");
         CLOUD_CLIENT = new GCSCloudClient(config, ICloudGuardian.NoOpCloudGuardian.INSTANCE);
     }
 
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/assembler/AssemblerBuilderVisitor.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/assembler/AssemblerBuilderVisitor.java
index 6480c30..cb447c8 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/assembler/AssemblerBuilderVisitor.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/assembler/AssemblerBuilderVisitor.java
@@ -18,7 +18,7 @@
  */
 package org.apache.asterix.column.assembler;
 
-import static org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary.DUMMY_FIELD_NAME_INDEX;
+import static org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary.DUMMY_FIELD_NAME_INDEX;
 
 import java.util.ArrayList;
 import java.util.BitSet;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/IFieldNamesDictionary.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/IFieldNamesDictionary.java
deleted file mode 100644
index 8aa0e88..0000000
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/IFieldNamesDictionary.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.column.metadata;
-
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.data.std.api.IValueReference;
-
-/**
- * Methods defining the fieldName dictionary,
- * which is used to encode a fieldName to an index.
- */
-public interface IFieldNamesDictionary {
-    /**
-     * @return all the inserted field names
-     */
-    List<IValueReference> getFieldNames();
-
-    /**
-     * @param fieldName fieldName byte array
-     * @return the index if the field exists; otherwise inserts the fieldName and returns the new index
-     * @throws HyracksDataException
-     */
-    int getOrCreateFieldNameIndex(IValueReference fieldName) throws HyracksDataException;
-
-    /**
-     * @param fieldName fieldName string
-     * @return the index if the field exists; otherwise inserts the fieldName and returns the new index
-     * @throws HyracksDataException
-     */
-    int getOrCreateFieldNameIndex(String fieldName) throws HyracksDataException;
-
-    /**
-     * @param fieldName
-     * @return the index of the field if it exists, otherwise -1
-     * @throws HyracksDataException
-     */
-    int getFieldNameIndex(String fieldName) throws HyracksDataException;
-
-    /**
-     * @param index encoded index
-     * @return the fieldName present at the requested index
-     */
-    IValueReference getFieldName(int index);
-
-    /**
-     * serialize the dictionary
-     * @param output
-     * @throws IOException
-     */
-    void serialize(DataOutput output) throws IOException;
-
-    /**
-     * reset and rebuild the dictionary
-     * @param input
-     * @throws IOException
-     */
-    void abort(DataInputStream input) throws IOException;
-
-}
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/AbstractFieldNamesDictionary.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/AbstractFieldNamesDictionary.java
deleted file mode 100644
index bffdb33..0000000
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/AbstractFieldNamesDictionary.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.asterix.column.metadata.dictionary;
-
-import java.io.DataInput;
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
-import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
-import org.apache.asterix.om.base.AMutableString;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.data.std.api.IValueReference;
-import org.apache.hyracks.data.std.primitive.VoidPointable;
-import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
-import org.apache.hyracks.util.string.UTF8StringReader;
-import org.apache.hyracks.util.string.UTF8StringWriter;
-
-public abstract class AbstractFieldNamesDictionary implements IFieldNamesDictionary {
-    /**
-     * Dummy field name used to add a column when encountering an empty object
-     */
-    public static final IValueReference DUMMY_FIELD_NAME;
-    public static final int DUMMY_FIELD_NAME_INDEX = -1;
-
-    //For declared fields
-    private final AMutableString mutableString;
-    private final AStringSerializerDeserializer stringSerDer;
-
-    static {
-        VoidPointable dummy = new VoidPointable();
-        dummy.set(new byte[0], 0, 0);
-        DUMMY_FIELD_NAME = dummy;
-    }
-
-    AbstractFieldNamesDictionary() {
-        mutableString = new AMutableString("");
-        stringSerDer = new AStringSerializerDeserializer(new UTF8StringWriter(), new UTF8StringReader());
-    }
-
-    public static IFieldNamesDictionary create() {
-        return new FieldNamesTrieDictionary();
-    }
-
-    public static IFieldNamesDictionary deserialize(DataInput input) throws IOException {
-        return FieldNamesTrieDictionary.deserialize(input);
-    }
-
-    static ArrayBackedValueStorage creatFieldName(IValueReference fieldName) throws HyracksDataException {
-        ArrayBackedValueStorage copy = new ArrayBackedValueStorage(fieldName.getLength());
-        copy.append(fieldName);
-        return copy;
-    }
-
-    protected ArrayBackedValueStorage creatFieldName(String fieldName) throws HyracksDataException {
-        ArrayBackedValueStorage serializedFieldName = new ArrayBackedValueStorage();
-        serializeFieldName(fieldName, serializedFieldName);
-        return serializedFieldName;
-    }
-
-    protected void serializeFieldName(String fieldName, ArrayBackedValueStorage storage) throws HyracksDataException {
-        mutableString.setValue(fieldName);
-        stringSerDer.serialize(mutableString, storage.getDataOutput());
-    }
-
-    static void deserializeFieldNames(DataInput input, List<IValueReference> fieldNames, int numberOfFieldNames)
-            throws IOException {
-        for (int i = 0; i < numberOfFieldNames; i++) {
-            int length = input.readInt();
-            ArrayBackedValueStorage fieldName = new ArrayBackedValueStorage(length);
-            fieldName.setSize(length);
-            input.readFully(fieldName.getByteArray(), 0, length);
-            fieldNames.add(fieldName);
-        }
-    }
-}
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/ByteToNodeMap.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/ByteToNodeMap.java
deleted file mode 100644
index 73c034b..0000000
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/ByteToNodeMap.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.column.metadata.dictionary;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.Collection;
-
-import it.unimi.dsi.fastutil.objects.ObjectArrays;
-
-final class ByteToNodeMap {
-    private static final TrieNode[] EMPTY = new TrieNode[0];
-    private TrieNode[] children;
-    private int numberOfChildren;
-
-    ByteToNodeMap() {
-        children = EMPTY;
-        numberOfChildren = 0;
-    }
-
-    private ByteToNodeMap(TrieNode[] children, int numberOfChildren) {
-        this.children = children;
-        this.numberOfChildren = numberOfChildren;
-    }
-
-    void put(byte key, TrieNode node) {
-        int index = Byte.toUnsignedInt(key);
-        ensure(index);
-        children[index] = node;
-        numberOfChildren++;
-    }
-
-    TrieNode get(byte key) {
-        int index = Byte.toUnsignedInt(key);
-        if (index < children.length) {
-            return children[index];
-        }
-
-        return null;
-    }
-
-    private void ensure(int index) {
-        if (index >= children.length) {
-            children = ObjectArrays.grow(children, index + 1, children.length);
-        }
-    }
-
-    void addAllChildren(Collection<TrieNode> collection) {
-        int addedChildren = 0;
-        for (int i = 0; i < children.length && addedChildren < numberOfChildren; i++) {
-            TrieNode child = children[i];
-            if (child != null) {
-                collection.add(children[i]);
-                addedChildren++;
-            }
-        }
-    }
-
-    void serialize(DataOutput out) throws IOException {
-        out.writeInt(numberOfChildren);
-        out.writeInt(children.length);
-        int addedChildren = 0;
-        for (int i = 0; i < children.length && addedChildren < numberOfChildren; i++) {
-            TrieNode child = children[i];
-            if (child != null) {
-                out.writeInt(i);
-                child.serialize(out);
-                addedChildren++;
-            }
-        }
-    }
-
-    static ByteToNodeMap deserialize(DataInput in) throws IOException {
-        int numberOfChildren = in.readInt();
-        int length = in.readInt();
-        TrieNode[] children = length == 0 ? EMPTY : new TrieNode[length];
-        for (int i = 0; i < numberOfChildren; i++) {
-            int index = in.readInt();
-            children[index] = TrieNode.deserialize(in);
-        }
-
-        return new ByteToNodeMap(children, numberOfChildren);
-    }
-}
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNameTrie.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNameTrie.java
deleted file mode 100644
index 4a19cd6..0000000
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNameTrie.java
+++ /dev/null
@@ -1,296 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.column.metadata.dictionary;
-
-import static org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary.deserializeFieldNames;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.ArrayDeque;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Queue;
-
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.data.std.api.IValueReference;
-import org.apache.hyracks.util.string.UTF8StringUtil;
-
-public class FieldNameTrie {
-    private static final int VERSION = 1;
-    private final LookupState lookupState;
-
-    private final List<IValueReference> fieldNames;
-    private TrieNode rootNode;
-
-    public FieldNameTrie() {
-        this(new ArrayList<>());
-    }
-
-    private FieldNameTrie(List<IValueReference> fieldNames) {
-        this.fieldNames = fieldNames;
-        this.rootNode = new TrieNode();
-        lookupState = new LookupState();
-    }
-
-    public int insert(IValueReference fieldName) throws HyracksDataException {
-        int presentIndex = lookup(fieldName);
-        if (presentIndex == TrieNode.NOT_FOUND_INDEX) {
-            presentIndex = hookup(FieldNamesTrieDictionary.creatFieldName(fieldName));
-        }
-        return presentIndex;
-    }
-
-    public int lookup(IValueReference fieldName) {
-        //noinspection DuplicatedCode
-        int len = UTF8StringUtil.getUTFLength(fieldName.getByteArray(), fieldName.getStartOffset());
-        int start = fieldName.getStartOffset() + UTF8StringUtil.getNumBytesToStoreLength(len);
-        byte[] bytes = fieldName.getByteArray();
-
-        TrieNode searchNode = rootNode;
-        TrieNode prevNode = searchNode;
-
-        int byteIndex = start;
-        // previousByteIndex should point to the first byte to be compared
-        // when inserting the fieldName
-        int previousByteIndex = byteIndex;
-
-        int lastIndex = (start + len - 1);
-        while (byteIndex <= lastIndex) {
-            byte b = bytes[byteIndex];
-
-            TrieNode nextNode = searchNode.getChild(b);
-            if (nextNode == null) {
-                // saving state in case hookup is requested
-                lookupState.setState(prevNode, start, previousByteIndex, len);
-                return TrieNode.NOT_FOUND_INDEX;
-            }
-            // if the node exists, then compare the remaining byte seq.
-            prevNode = searchNode;
-            searchNode = nextNode;
-
-            if (searchNode.getLength() > 1) { // the first byte was already matched via getChild
-                // compare the stored sequence.
-                int fieldBytesLeftToCompare = lastIndex - byteIndex + 1;
-                // if the sequence stored in the node is longer than the input
-                // field's remaining bytes, the field cannot be present.
-                if (fieldBytesLeftToCompare < searchNode.getLength()) {
-                    // saving state in case hookup is requested
-                    lookupState.setState(prevNode, start, byteIndex, len);
-                    return TrieNode.NOT_FOUND_INDEX;
-                }
-
-                int c = 0;
-                byte[] storedFieldBytes = fieldNames.get(searchNode.getIndex()).getByteArray();
-                int storedFieldStart = searchNode.getStart();
-                previousByteIndex = byteIndex;
-                while (c < searchNode.getLength()) {
-                    if (bytes[byteIndex] != storedFieldBytes[storedFieldStart + c]) {
-                        // saving state in case hookup is requested
-                        // will restart from oldByteIndex, to make logic simpler.
-                        // other way could have been to store the splitIndex.
-                        lookupState.setState(prevNode, start, previousByteIndex, len);
-                        return TrieNode.NOT_FOUND_INDEX;
-                    }
-                    c++;
-                    byteIndex++;
-                }
-            } else {
-                previousByteIndex = byteIndex;
-                byteIndex++;
-            }
-        }
-
-        // saving state in case hookup is requested
-        lookupState.setState(prevNode, start, previousByteIndex, len);
-        return searchNode.isEndOfField() ? searchNode.getIndex() : TrieNode.NOT_FOUND_INDEX;
-    }
-
-    private int hookup(IValueReference fieldName) {
-        // since a lookup operation always precedes a hookup operation,
-        // we can use the saved state to start hookup.
-        int len = lookupState.getFieldLength();
-        TrieNode searchNode = lookupState.getLastNode();
-
-        // resume from the stored node.
-        int bytesToStoreLength = UTF8StringUtil.getNumBytesToStoreLength(len);
-
-        int byteIndex = lookupState.getRelativeOffsetFromStart() + bytesToStoreLength;
-        byte[] bytes = fieldName.getByteArray();
-        int lastIndex = (bytesToStoreLength + len - 1);
-        while (byteIndex <= lastIndex) {
-            byte b = bytes[byteIndex];
-            TrieNode nextNode = searchNode.getChild(b);
-            if (nextNode == null) {
-                // since there is no such node, create one and store
-                // the remaining bytes in it.
-                TrieNode childNode = new TrieNode();
-                // first insert, then add the field
-                // start from byteIndex with newLength.
-                // newLength = lastIndex - byteIndex + 1
-                childNode.setIndex(fieldNames.size(), byteIndex, lastIndex - byteIndex + 1, bytesToStoreLength);
-                childNode.setIsEndOfField(true);
-                fieldNames.add(fieldName);
-
-                searchNode.putChild(b, childNode);
-                return childNode.getIndex();
-            }
-            // if node exists, compare the remaining byte seq
-            searchNode = nextNode;
-
-            if (searchNode.getLength() > 1) {
-                // compare the byte seq
-                int c = 0;
-                int oldByteIndex = byteIndex;
-
-                IValueReference storedFieldName = fieldNames.get(searchNode.getIndex());
-                byte[] storedFieldBytes = storedFieldName.getByteArray();
-                int storedFieldStart = searchNode.getStart();
-                while (c < Math.min(searchNode.getLength(), lastIndex - oldByteIndex + 1)) {
-                    if (bytes[byteIndex] != storedFieldBytes[storedFieldStart + c]) {
-                        break;
-                    }
-                    c++;
-                    byteIndex++;
-                }
-
-                // from c & byteIndex, things are not matching,
-                // split into two nodes,
-                // one from (c, ...) -> handled below
-                // other from (byteIndex, ...) -> handled in the next iteration, as byteIndex will be absent.
-
-                // handling (c, ..)
-                int leftToSplitForCurrentNode = searchNode.getLength() - c;
-                if (leftToSplitForCurrentNode > 0) {
-                    searchNode.split(storedFieldName, c);
-                }
-            } else {
-                byteIndex++;
-            }
-        }
-
-        // since the node is already present,
-        // point it to the current fieldName, and update the start and length based on the fieldName
-        // prefix would be the same
-        // find absolute starting point in the current fieldName
-        int diff = searchNode.getStart() - searchNode.getBytesToStoreLength();
-        // since hookup happens on a new fieldName, hence start will be bytesToStoreLength
-        searchNode.setIndex(fieldNames.size(), bytesToStoreLength + diff, searchNode.getLength(), bytesToStoreLength);
-        searchNode.setIsEndOfField(true);
-        fieldNames.add(fieldName);
-        return searchNode.getIndex();
-    }
-
-    public void serialize(DataOutput out) throws IOException {
-        out.writeInt(VERSION);
-
-        // serialize fieldNames
-        out.writeInt(fieldNames.size());
-        for (IValueReference fieldName : fieldNames) {
-            out.writeInt(fieldName.getLength());
-            out.write(fieldName.getByteArray(), fieldName.getStartOffset(), fieldName.getLength());
-        }
-
-        rootNode.serialize(out);
-    }
-
-    public List<IValueReference> getFieldNames() {
-        return fieldNames;
-    }
-
-    public IValueReference getFieldName(int fieldIndex) {
-        return fieldNames.get(fieldIndex);
-    }
-
-    public void clear() {
-        rootNode = null;
-        fieldNames.clear();
-    }
-
-    public static FieldNameTrie deserialize(DataInput in) throws IOException {
-        int version = in.readInt();
-        if (version == VERSION) {
-            return deserializeV1(in);
-        }
-        throw new IllegalStateException("Unsupported version: " + version);
-    }
-
-    private static FieldNameTrie deserializeV1(DataInput in) throws IOException {
-        int numberOfFieldNames = in.readInt();
-
-        List<IValueReference> fieldNames = new ArrayList<>();
-        deserializeFieldNames(in, fieldNames, numberOfFieldNames);
-
-        FieldNameTrie newTrie = new FieldNameTrie(fieldNames);
-        newTrie.rootNode = TrieNode.deserialize(in);
-
-        return newTrie;
-    }
-
-    @Override
-    public String toString() {
-        TrieNode currentNode = rootNode;
-        Queue<TrieNode> queue = new ArrayDeque<>();
-        currentNode.getChildren().addAllChildren(queue);
-        StringBuilder treeBuilder = new StringBuilder();
-        while (!queue.isEmpty()) {
-            int len = queue.size();
-            for (int i = 0; i < len; i++) {
-                TrieNode node = queue.poll();
-                assert node != null;
-                byte[] bytes = fieldNames.get(node.getIndex()).getByteArray();
-                for (int j = 0; j < node.getLength(); j++) {
-                    treeBuilder.append((char) bytes[node.getStart() + j]);
-                }
-                treeBuilder.append("(").append(node.isEndOfField()).append(")");
-                if (i != len - 1) {
-                    treeBuilder.append(" | ");
-                }
-
-                node.getChildren().addAllChildren(queue);
-            }
-            treeBuilder.append("\n");
-        }
-        return treeBuilder.toString();
-    }
-
-    private static class LookupState {
-        private TrieNode lastNode;
-        private int relativeOffsetFromStart;
-        private int fieldLength;
-
-        public void setState(TrieNode lastNode, int startIndex, int continuationByteIndex, int fieldLength) {
-            this.lastNode = lastNode;
-            this.relativeOffsetFromStart = continuationByteIndex - startIndex;
-            this.fieldLength = fieldLength;
-        }
-
-        public TrieNode getLastNode() {
-            return lastNode;
-        }
-
-        public int getRelativeOffsetFromStart() {
-            return relativeOffsetFromStart;
-        }
-
-        public int getFieldLength() {
-            return fieldLength;
-        }
-    }
-}
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNamesHashDictionary.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNamesHashDictionary.java
deleted file mode 100644
index 73c9a73..0000000
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNamesHashDictionary.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.column.metadata.dictionary;
-
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hyracks.api.dataflow.value.IBinaryHashFunction;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
-import org.apache.hyracks.data.std.api.IValueReference;
-import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
-import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
-
-import it.unimi.dsi.fastutil.ints.Int2IntMap;
-import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;
-import it.unimi.dsi.fastutil.objects.Object2IntMap;
-import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
-
-/**
- * @deprecated Use {@link FieldNamesTrieDictionary}
- */
-@Deprecated
-public class FieldNamesHashDictionary extends AbstractFieldNamesDictionary {
-    //For both declared and inferred fields
-    private final List<IValueReference> fieldNames;
-    private final Object2IntMap<String> declaredFieldNamesToIndexMap;
-    private final Int2IntMap hashToFieldNameIndexMap;
-    private final IBinaryHashFunction fieldNameHashFunction;
-
-    //For lookups
-    private final ArrayBackedValueStorage lookupStorage;
-
-    public FieldNamesHashDictionary() {
-        this(new ArrayList<>(), new Object2IntOpenHashMap<>(), new Int2IntOpenHashMap());
-    }
-
-    private FieldNamesHashDictionary(List<IValueReference> fieldNames,
-            Object2IntMap<String> declaredFieldNamesToIndexMap, Int2IntMap hashToFieldNameIndexMap) {
-        super();
-        this.fieldNames = fieldNames;
-        this.declaredFieldNamesToIndexMap = declaredFieldNamesToIndexMap;
-        this.hashToFieldNameIndexMap = hashToFieldNameIndexMap;
-        fieldNameHashFunction =
-                new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY).createBinaryHashFunction();
-        lookupStorage = new ArrayBackedValueStorage();
-    }
-
-    @Override
-    public List<IValueReference> getFieldNames() {
-        return fieldNames;
-    }
-
-    //TODO solve collisions (they're so rare that I haven't seen any)
-    @Override
-    public int getOrCreateFieldNameIndex(IValueReference fieldName) throws HyracksDataException {
-        if (fieldName == DUMMY_FIELD_NAME) {
-            return DUMMY_FIELD_NAME_INDEX;
-        }
-
-        int hash = getHash(fieldName);
-        if (!hashToFieldNameIndexMap.containsKey(hash)) {
-            int index = addFieldName(creatFieldName(fieldName), hash);
-            hashToFieldNameIndexMap.put(hash, index);
-            return index;
-        }
-        return hashToFieldNameIndexMap.get(hash);
-    }
-
-    @Override
-    public int getOrCreateFieldNameIndex(String fieldName) throws HyracksDataException {
-        if (!declaredFieldNamesToIndexMap.containsKey(fieldName)) {
-            IValueReference serializedFieldName = creatFieldName(fieldName);
-            int hash = getHash(serializedFieldName);
-            int index = addFieldName(serializedFieldName, hash);
-            declaredFieldNamesToIndexMap.put(fieldName, index);
-            return index;
-        }
-        return declaredFieldNamesToIndexMap.getInt(fieldName);
-    }
-
-    @Override
-    public int getFieldNameIndex(String fieldName) throws HyracksDataException {
-        lookupStorage.reset();
-        serializeFieldName(fieldName, lookupStorage);
-        return hashToFieldNameIndexMap.getOrDefault(getHash(lookupStorage), -1);
-    }
-
-    private int getHash(IValueReference fieldName) throws HyracksDataException {
-        byte[] object = fieldName.getByteArray();
-        int start = fieldName.getStartOffset();
-        int length = fieldName.getLength();
-
-        return fieldNameHashFunction.hash(object, start, length);
-    }
-
-    private int addFieldName(IValueReference fieldName, int hash) {
-        int index = fieldNames.size();
-        hashToFieldNameIndexMap.put(hash, index);
-        fieldNames.add(fieldName);
-        return index;
-    }
-
-    @Override
-    public IValueReference getFieldName(int index) {
-        if (index == DUMMY_FIELD_NAME_INDEX) {
-            return DUMMY_FIELD_NAME;
-        }
-        return fieldNames.get(index);
-    }
-
-    @Override
-    public void serialize(DataOutput output) throws IOException {
-        output.writeInt(fieldNames.size());
-        for (IValueReference fieldName : fieldNames) {
-            output.writeInt(fieldName.getLength());
-            output.write(fieldName.getByteArray(), fieldName.getStartOffset(), fieldName.getLength());
-        }
-
-        output.writeInt(declaredFieldNamesToIndexMap.size());
-        for (Object2IntMap.Entry<String> declaredFieldIndex : declaredFieldNamesToIndexMap.object2IntEntrySet()) {
-            output.writeUTF(declaredFieldIndex.getKey());
-            output.writeInt(declaredFieldIndex.getIntValue());
-        }
-
-        for (Int2IntMap.Entry hashIndex : hashToFieldNameIndexMap.int2IntEntrySet()) {
-            output.writeInt(hashIndex.getIntKey());
-            output.writeInt(hashIndex.getIntValue());
-        }
-    }
-
-    public static FieldNamesHashDictionary deserialize(DataInput input) throws IOException {
-        int numberOfFieldNames = input.readInt();
-
-        List<IValueReference> fieldNames = new ArrayList<>();
-        deserializeFieldNames(input, fieldNames, numberOfFieldNames);
-
-        Object2IntMap<String> declaredFieldNamesToIndexMap = new Object2IntOpenHashMap<>();
-        deserializeDeclaredFieldNames(input, declaredFieldNamesToIndexMap);
-
-        Int2IntMap hashToFieldNameIndexMap = new Int2IntOpenHashMap();
-        deserializeHashToFieldNameIndex(input, hashToFieldNameIndexMap, numberOfFieldNames);
-
-        return new FieldNamesHashDictionary(fieldNames, declaredFieldNamesToIndexMap, hashToFieldNameIndexMap);
-    }
-
-    @Override
-    public void abort(DataInputStream input) throws IOException {
-        int numberOfFieldNames = input.readInt();
-
-        fieldNames.clear();
-        deserializeFieldNames(input, fieldNames, numberOfFieldNames);
-
-        declaredFieldNamesToIndexMap.clear();
-        deserializeDeclaredFieldNames(input, declaredFieldNamesToIndexMap);
-
-        hashToFieldNameIndexMap.clear();
-        deserializeHashToFieldNameIndex(input, hashToFieldNameIndexMap, numberOfFieldNames);
-    }
-
-    private static void deserializeDeclaredFieldNames(DataInput input,
-            Object2IntMap<String> declaredFieldNamesToIndexMap) throws IOException {
-        int numberOfDeclaredFieldNames = input.readInt();
-        for (int i = 0; i < numberOfDeclaredFieldNames; i++) {
-            String fieldName = input.readUTF();
-            int fieldNameIndex = input.readInt();
-            declaredFieldNamesToIndexMap.put(fieldName, fieldNameIndex);
-        }
-    }
-
-    private static void deserializeHashToFieldNameIndex(DataInput input, Int2IntMap hashToFieldNameIndexMap,
-            int numberOfFieldNames) throws IOException {
-        for (int i = 0; i < numberOfFieldNames; i++) {
-            int hash = input.readInt();
-            int fieldNameIndex = input.readInt();
-            hashToFieldNameIndexMap.put(hash, fieldNameIndex);
-        }
-    }
-}
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNamesTrieDictionary.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNamesTrieDictionary.java
deleted file mode 100644
index 10de829..0000000
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNamesTrieDictionary.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.column.metadata.dictionary;
-
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.data.std.api.IValueReference;
-import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
-
-public class FieldNamesTrieDictionary extends AbstractFieldNamesDictionary {
-    private FieldNameTrie dictionary;
-    //For lookups
-    private final ArrayBackedValueStorage lookupStorage;
-
-    public FieldNamesTrieDictionary() {
-        this(new FieldNameTrie());
-    }
-
-    private FieldNamesTrieDictionary(FieldNameTrie dictionary) {
-        super();
-        this.dictionary = dictionary;
-        lookupStorage = new ArrayBackedValueStorage();
-    }
-
-    @Override
-    public List<IValueReference> getFieldNames() {
-        return dictionary.getFieldNames();
-    }
-
-    @Override
-    public int getOrCreateFieldNameIndex(IValueReference fieldName) throws HyracksDataException {
-        if (fieldName == DUMMY_FIELD_NAME) {
-            return DUMMY_FIELD_NAME_INDEX;
-        }
-
-        return dictionary.insert(fieldName);
-    }
-
-    @Override
-    public int getOrCreateFieldNameIndex(String fieldName) throws HyracksDataException {
-        return getOrCreateFieldNameIndex(creatFieldName(fieldName));
-    }
-
-    @Override
-    public int getFieldNameIndex(String fieldName) throws HyracksDataException {
-        lookupStorage.reset();
-        serializeFieldName(fieldName, lookupStorage);
-        return dictionary.lookup(lookupStorage);
-    }
-
-    @Override
-    public IValueReference getFieldName(int index) {
-        if (index == DUMMY_FIELD_NAME_INDEX) {
-            return DUMMY_FIELD_NAME;
-        }
-        return dictionary.getFieldName(index);
-    }
-
-    @Override
-    public void serialize(DataOutput output) throws IOException {
-        dictionary.serialize(output);
-    }
-
-    public static FieldNamesTrieDictionary deserialize(DataInput input) throws IOException {
-        FieldNameTrie fieldNameTrie = FieldNameTrie.deserialize(input);
-        return new FieldNamesTrieDictionary(fieldNameTrie);
-    }
-
-    @Override
-    public void abort(DataInputStream input) throws IOException {
-        dictionary.clear();
-        dictionary = FieldNameTrie.deserialize(input);
-    }
-}
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/TrieNode.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/TrieNode.java
deleted file mode 100644
index 32e902b..0000000
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/TrieNode.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.asterix.column.metadata.dictionary;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.hyracks.data.std.api.IValueReference;
-
-class TrieNode {
-    public static final int NOT_FOUND_INDEX = -1;
-
-    private ByteToNodeMap children;
-    private boolean isEndOfField;
-    private int index;
-    private int start; // includes the edge's byte
-    private int length; // includes the edge's byte
-    private int bytesToStoreLength;
-
-    TrieNode() {
-        this.children = new ByteToNodeMap();
-        index = NOT_FOUND_INDEX;
-    }
-
-    TrieNode(ByteToNodeMap children) {
-        this.children = children;
-        index = NOT_FOUND_INDEX;
-    }
-
-    public void setIndex(int index, int start, int length, int bytesToStoreLength) {
-        this.index = index;
-        this.start = start;
-        this.length = length;
-        this.bytesToStoreLength = bytesToStoreLength;
-    }
-
-    public void setIsEndOfField(boolean isEndOfField) {
-        this.isEndOfField = isEndOfField;
-    }
-
-    public TrieNode getChild(byte key) {
-        return children.get(key);
-    }
-
-    public void putChild(byte key, TrieNode child) {
-        children.put(key, child);
-    }
-
-    public ByteToNodeMap getChildren() {
-        return children;
-    }
-
-    public int getIndex() {
-        return index;
-    }
-
-    public int getStart() {
-        return start;
-    }
-
-    public int getLength() {
-        return length;
-    }
-
-    public int getBytesToStoreLength() {
-        return bytesToStoreLength;
-    }
-
-    public boolean isEndOfField() {
-        return isEndOfField;
-    }
-
-    public void reset() {
-        // the old children map was handed off to the new (split) node.
-        children = new ByteToNodeMap();
-    }
-
-    public void split(IValueReference fieldName, int splitIndex) {
-        byte[] storedFieldBytes = fieldName.getByteArray();
-        byte splitByte = storedFieldBytes[start + splitIndex];
-        // there is a suffix to split off: create a new child node
-        // and link it under the split byte.
-        TrieNode childNode = new TrieNode(children);
-
-        int leftToSplit = length - splitIndex;
-        childNode.setIndex(index, start + splitIndex, leftToSplit, bytesToStoreLength);
-        childNode.setIsEndOfField(isEndOfField);
-        // update the current search node
-        // new length would be 'c'
-        reset();
-        setIndex(index, start, splitIndex, bytesToStoreLength);
-        putChild(splitByte, childNode);
-        // since searchNode was split, isEndOfField becomes false.
-        setIsEndOfField(false);
-    }
-
-    public void serialize(DataOutput out) throws IOException {
-        // Serialize the children first
-        children.serialize(out);
-        // serialize fields
-        out.writeBoolean(isEndOfField);
-        out.writeInt(index);
-        out.writeInt(start);
-        out.writeInt(length);
-        out.writeInt(bytesToStoreLength);
-    }
-
-    public static TrieNode deserialize(DataInput in) throws IOException {
-        ByteToNodeMap children = ByteToNodeMap.deserialize(in);
-        TrieNode node = new TrieNode(children);
-        node.isEndOfField = in.readBoolean();
-        node.index = in.readInt();
-        node.start = in.readInt();
-        node.length = in.readInt();
-        node.bytesToStoreLength = in.readInt();
-
-        return node;
-    }
-}
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/ObjectSchemaNode.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/ObjectSchemaNode.java
index 0bea188..1f74fb3 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/ObjectSchemaNode.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/ObjectSchemaNode.java
@@ -18,7 +18,7 @@
  */
 package org.apache.asterix.column.metadata.schema;
 
-import static org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary.DUMMY_FIELD_NAME_INDEX;
+import static org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary.DUMMY_FIELD_NAME_INDEX;
 
 import java.io.DataInput;
 import java.io.DataInputStream;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaBuilderFromIATypeVisitor.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaBuilderFromIATypeVisitor.java
index 05c4eda..c7d3df1 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaBuilderFromIATypeVisitor.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaBuilderFromIATypeVisitor.java
@@ -22,12 +22,12 @@
 
 import java.util.List;
 
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
 import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
 import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
 import org.apache.asterix.column.metadata.schema.collection.AbstractCollectionSchemaNode;
 import org.apache.asterix.column.metadata.schema.primitive.MissingFieldSchemaNode;
 import org.apache.asterix.column.operation.lsm.flush.FlushColumnMetadata;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.AUnionType;
 import org.apache.asterix.om.types.AbstractCollectionType;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaClipperVisitor.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaClipperVisitor.java
index afe8368..ff05568 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaClipperVisitor.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaClipperVisitor.java
@@ -21,13 +21,13 @@
 import java.io.IOException;
 import java.util.Map;
 
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
 import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
 import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
 import org.apache.asterix.column.metadata.schema.UnionSchemaNode;
 import org.apache.asterix.column.metadata.schema.collection.AbstractCollectionSchemaNode;
 import org.apache.asterix.column.metadata.schema.primitive.MissingFieldSchemaNode;
 import org.apache.asterix.column.metadata.schema.primitive.PrimitiveSchemaNode;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.AUnionType;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/lsm/flush/FlushColumnMetadata.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/lsm/flush/FlushColumnMetadata.java
index f514638..87f9ff3 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/lsm/flush/FlushColumnMetadata.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/lsm/flush/FlushColumnMetadata.java
@@ -33,9 +33,7 @@
 import java.util.Map;
 
 import org.apache.asterix.column.metadata.AbstractColumnMetadata;
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
 import org.apache.asterix.column.metadata.PathInfoSerializer;
-import org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary;
 import org.apache.asterix.column.metadata.schema.AbstractSchemaNestedNode;
 import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
 import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
@@ -51,6 +49,8 @@
 import org.apache.asterix.column.values.IColumnValuesWriter;
 import org.apache.asterix.column.values.IColumnValuesWriterFactory;
 import org.apache.asterix.column.values.writer.AbstractColumnValuesWriter;
+import org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.commons.lang3.mutable.Mutable;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnMetadata.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnMetadata.java
index e507d53..15a6277 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnMetadata.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnMetadata.java
@@ -41,8 +41,6 @@
 import org.apache.asterix.column.filter.range.IColumnRangeFilterEvaluatorFactory;
 import org.apache.asterix.column.filter.range.IColumnRangeFilterValueAccessor;
 import org.apache.asterix.column.metadata.AbstractColumnImmutableReadMetadata;
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
-import org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary;
 import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
 import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
 import org.apache.asterix.column.metadata.schema.visitor.SchemaClipperVisitor;
@@ -50,6 +48,8 @@
 import org.apache.asterix.column.values.IColumnValuesReader;
 import org.apache.asterix.column.values.IColumnValuesReaderFactory;
 import org.apache.asterix.column.values.reader.PrimitiveColumnValuesReader;
+import org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.runtime.projection.FunctionCallInformation;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnWithMetaMetadata.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnWithMetaMetadata.java
index 356ddaa..d931242 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnWithMetaMetadata.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnWithMetaMetadata.java
@@ -38,14 +38,14 @@
 import org.apache.asterix.column.filter.iterable.IColumnIterableFilterEvaluatorFactory;
 import org.apache.asterix.column.filter.range.IColumnRangeFilterEvaluatorFactory;
 import org.apache.asterix.column.filter.range.IColumnRangeFilterValueAccessor;
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
-import org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary;
 import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
 import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
 import org.apache.asterix.column.metadata.schema.visitor.SchemaClipperVisitor;
 import org.apache.asterix.column.values.IColumnValuesReader;
 import org.apache.asterix.column.values.IColumnValuesReaderFactory;
 import org.apache.asterix.column.values.reader.PrimitiveColumnValuesReader;
+import org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.runtime.projection.FunctionCallInformation;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaJSONBuilderVisitor.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaJSONBuilderVisitor.java
index b425a26..a4b9240 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaJSONBuilderVisitor.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaJSONBuilderVisitor.java
@@ -21,7 +21,6 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
 import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
 import org.apache.asterix.column.metadata.schema.ISchemaNodeVisitor;
 import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
@@ -30,6 +29,7 @@
 import org.apache.asterix.column.metadata.schema.primitive.PrimitiveSchemaNode;
 import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
 import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IValueReference;
 import org.apache.hyracks.data.std.util.ByteArrayAccessibleDataInputStream;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaStringBuilderVisitor.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaStringBuilderVisitor.java
index 6d991cf..ad0e460 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaStringBuilderVisitor.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaStringBuilderVisitor.java
@@ -21,7 +21,6 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
 import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
 import org.apache.asterix.column.metadata.schema.ISchemaNodeVisitor;
 import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
@@ -30,6 +29,7 @@
 import org.apache.asterix.column.metadata.schema.primitive.PrimitiveSchemaNode;
 import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
 import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IValueReference;
 import org.apache.hyracks.data.std.util.ByteArrayAccessibleDataInputStream;
diff --git a/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameDictionaryPerfTest.java b/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameDictionaryPerfTest.java
index 63c2b22..4b33e19 100644
--- a/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameDictionaryPerfTest.java
+++ b/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameDictionaryPerfTest.java
@@ -20,11 +20,11 @@
 
 import java.util.concurrent.TimeUnit;
 
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
-import org.apache.asterix.column.metadata.dictionary.FieldNamesHashDictionary;
-import org.apache.asterix.column.metadata.dictionary.FieldNamesTrieDictionary;
 import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
 import org.apache.asterix.om.base.AMutableString;
+import org.apache.asterix.om.dictionary.FieldNamesHashDictionary;
+import org.apache.asterix.om.dictionary.FieldNamesTrieDictionary;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
 import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IValueReference;
diff --git a/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameTrieTest.java b/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameTrieTest.java
index c9f58d5..1ec468c 100644
--- a/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameTrieTest.java
+++ b/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameTrieTest.java
@@ -28,8 +28,8 @@
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.asterix.column.metadata.dictionary.FieldNameTrie;
-import org.apache.asterix.column.metadata.dictionary.FieldNamesTrieDictionary;
+import org.apache.asterix.om.dictionary.FieldNameTrie;
+import org.apache.asterix.om.dictionary.FieldNamesTrieDictionary;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IValueReference;
 import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IIdentifierMapper.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IIdentifierMapper.java
index b6bce47..ba26ba7 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IIdentifierMapper.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IIdentifierMapper.java
@@ -19,13 +19,29 @@
 
 package org.apache.asterix.common.api;
 
+import org.apache.commons.lang3.StringUtils;
+
 @FunctionalInterface
 public interface IIdentifierMapper {
 
     enum Modifier {
         SINGULAR,
+        SINGULAR_CAPITALIZED,
         PLURAL,
-        NONE
+        PLURAL_CAPITALIZED,
+        NONE,
+        NONE_CAPITALIZED;
+
+        public String fixup(String input) {
+            switch (this) {
+                case SINGULAR_CAPITALIZED:
+                case PLURAL_CAPITALIZED:
+                case NONE_CAPITALIZED:
+                    return StringUtils.capitalize(input);
+                default:
+                    return input;
+            }
+        }
     }
 
     String map(String identifier, Modifier modifier);
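
A note on the new modifiers: the capitalized variants differ from their base forms only in fixup(), which upper-cases the first character via StringUtils.capitalize. A minimal sketch of the intended usage (the lambda below is illustrative, not the shipped mapper):

    import org.apache.asterix.common.api.IIdentifierMapper;
    import org.apache.asterix.common.api.IIdentifierMapper.Modifier;

    public class ModifierDemo {
        public static void main(String[] args) {
            // Illustrative mapper: echoes the identifier, letting the modifier fix the case.
            IIdentifierMapper mapper = (id, modifier) -> modifier.fixup(id.toLowerCase());
            System.out.println(mapper.map("DATASET", Modifier.NONE));             // dataset
            System.out.println(mapper.map("DATASET", Modifier.NONE_CAPITALIZED)); // Dataset
            System.out.println(Modifier.PLURAL_CAPITALIZED.fixup("datasets"));    // Datasets
        }
    }
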
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
index 8ca5c94..6f11e1e 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
@@ -152,7 +152,12 @@
         COMPILER_COPY_TO_WRITE_BUFFER_SIZE(
                 getRangedIntegerType(5, Integer.MAX_VALUE),
                 StorageUtil.getIntSizeInBytes(8, StorageUtil.StorageUnit.MEGABYTE),
-                "The COPY TO write buffer size in bytes. (default: 8MB, min: 5MB)");
+                "The COPY TO write buffer size in bytes. (default: 8MB, min: 5MB)"),
+        COMPILER_MAX_VARIABLE_OCCURRENCES_INLINING(
+                getRangedIntegerType(0, Integer.MAX_VALUE),
+                128,
+                "Maximum occurrences of a variable allowed in an expression for inlining"),
+        COMPILER_ORDERFIELDS(BOOLEAN, AlgebricksConfig.ORDERED_FIELDS, "Enable/disable preserving the SELECT field order in the response");
 
         private final IOptionType type;
         private final Object defaultValue;
@@ -234,6 +239,11 @@
 
     public static final String COMPILER_COLUMN_FILTER_KEY = Option.COMPILER_COLUMN_FILTER.ini();
 
+    public static final String COMPILER_MAX_VARIABLE_OCCURRENCES_INLINING_KEY =
+            Option.COMPILER_MAX_VARIABLE_OCCURRENCES_INLINING.ini();
+
+    public static final String COMPILER_ORDERFIELDS_KEY = Option.COMPILER_ORDERFIELDS.ini();
+
     public static final int COMPILER_PARALLELISM_AS_STORAGE = 0;
 
     public CompilerProperties(PropertiesAccessor accessor) {
@@ -362,6 +372,10 @@
         return accessor.getBoolean(Option.COMPILER_COLUMN_FILTER);
     }
 
+    public boolean isOrderedFields() {
+        return accessor.getBoolean(Option.COMPILER_ORDERFIELDS);
+    }
+
     public int getRuntimeMemoryOverheadPercentage() {
         return accessor.getInt(Option.COMPILER_RUNTIME_MEMORY_OVERHEAD);
     }
@@ -369,4 +383,8 @@
     public int getCopyToWriteBufferSize() {
         return accessor.getInt(Option.COMPILER_COPY_TO_WRITE_BUFFER_SIZE);
     }
+
+    public int getMaxVariableOccurrencesForInlining() {
+        return accessor.getInt(Option.COMPILER_MAX_VARIABLE_OCCURRENCES_INLINING);
+    }
 }
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/OptimizationConfUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/OptimizationConfUtil.java
index 160c04d..fe5ec9e 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/OptimizationConfUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/OptimizationConfUtil.java
@@ -95,6 +95,10 @@
                 compilerProperties.getQueryPlanShapeMode());
         boolean columnFilter = getBoolean(querySpecificConfig, CompilerProperties.COMPILER_COLUMN_FILTER_KEY,
                 compilerProperties.isColumnFilter());
+        int maxVariableOccurrencesForInlining =
+                getMaxVariableOccurrencesForInlining(compilerProperties, querySpecificConfig, sourceLoc);
+        boolean orderFields = getBoolean(querySpecificConfig, CompilerProperties.COMPILER_ORDERFIELDS_KEY,
+                compilerProperties.isOrderedFields());
 
         PhysicalOptimizationConfig physOptConf = new PhysicalOptimizationConfig();
         physOptConf.setFrameSize(frameSize);
@@ -123,6 +127,8 @@
         physOptConf.setMinJoinFrames(compilerProperties.getMinJoinMemoryFrames());
         physOptConf.setMinGroupFrames(compilerProperties.getMinGroupMemoryFrames());
         physOptConf.setMinWindowFrames(compilerProperties.getMinWindowMemoryFrames());
+        physOptConf.setMaxVariableOccurrencesForInlining(maxVariableOccurrencesForInlining);
+        physOptConf.setOrderFields(orderFields);
 
         // We should have already validated the parameter names at this point...
         Set<String> filteredParameterNames = new HashSet<>(parameterNames);
@@ -219,4 +225,16 @@
         }
         return defaultValue;
     }
+
+    private static int getMaxVariableOccurrencesForInlining(CompilerProperties compilerProperties,
+            Map<String, Object> querySpecificConfig, SourceLocation sourceLoc) throws AsterixException {
+        String valueInQuery =
+                (String) querySpecificConfig.get(CompilerProperties.COMPILER_MAX_VARIABLE_OCCURRENCES_INLINING_KEY);
+        try {
+            return valueInQuery == null ? compilerProperties.getMaxVariableOccurrencesForInlining()
+                    : OptionTypes.NONNEGATIVE_INTEGER.parse(valueInQuery);
+        } catch (IllegalArgumentException e) {
+            throw AsterixException.create(ErrorCode.COMPILATION_ERROR, sourceLoc, e.getMessage());
+        }
+    }
 }
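
The new getter follows the same precedence rule as the other compiler options: a value supplied with the query overrides the cluster default, and a malformed value surfaces as a compilation error rather than a server fault. The fallback-parse pattern in isolation (the key name and helper below are illustrative, not the shipped ones):

    import java.util.Map;

    public class OverrideDemo {
        // Resolve a non-negative integer option: query-level value wins, else the default.
        static int resolve(Map<String, String> querySpecificConfig, String key, int clusterDefault) {
            String valueInQuery = querySpecificConfig.get(key);
            if (valueInQuery == null) {
                return clusterDefault;
            }
            int parsed = Integer.parseInt(valueInQuery.trim()); // malformed -> NumberFormatException
            if (parsed < 0) {
                throw new IllegalArgumentException("expected a non-negative integer: " + valueInQuery);
            }
            return parsed;
        }

        public static void main(String[] args) {
            String key = "compiler.maxvariableoccurrencesinlining"; // hypothetical ini key
            System.out.println(resolve(Map.of(), key, 128));          // 128 (default)
            System.out.println(resolve(Map.of(key, "32"), key, 128)); // 32  (override)
        }
    }
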
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java
index 091fb67..0cf6eb2 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.common.exceptions;
 
+import org.apache.asterix.common.utils.IdentifierUtil;
 import org.apache.hyracks.api.exceptions.IError;
 import org.apache.hyracks.api.util.ErrorMessageUtil;
 
@@ -97,7 +98,6 @@
     UNSUPPORTED_COLUMN_TYPE(67),
     INVALID_KEY_TYPE(68),
     FAILED_TO_READ_KEY(69),
-
     UNSUPPORTED_JRE(100),
 
     EXTERNAL_UDF_RESULT_TYPE_ERROR(200),
@@ -304,6 +304,16 @@
     INCOMPATIBLE_FIELDS_IN_PRIMARY_KEY(1199),
     PREFIX_SHOULD_NOT_START_WITH_SLASH(1200),
     INVALID_DELTA_TABLE_FORMAT(1201),
+    UNSUPPORTED_WRITER_COMPRESSION_SCHEME(1202),
+    INVALID_PARQUET_SCHEMA(1203),
+    TYPE_UNSUPPORTED_PARQUET_WRITE(1204),
+    INVALID_PARQUET_WRITER_VERSION(1205),
+    ILLEGAL_SIZE_PROVIDED(1206),
+    TYPE_UNSUPPORTED_CSV_WRITE(1207),
+    INVALID_CSV_SCHEMA(1208),
+    MAXIMUM_VALUE_ALLOWED_FOR_PARAM(1209),
+    STORAGE_SIZE_NOT_APPLICABLE_TO_TYPE(1210),
+    COULD_NOT_CREATE_TOKENS(1211),
 
     // Feed errors
     DATAFLOW_ILLEGAL_STATE(3001),
@@ -423,6 +433,10 @@
     PARAM_NOT_ALLOWED_IF_PARAM_IS_PRESENT(3122),
     // Avro error
     UNSUPPORTED_TYPE_FOR_AVRO(3123),
+    // Copy to CSV Error
+    INVALID_QUOTE(3124),
+    INVALID_FORCE_QUOTE(3125),
+    INVALID_ESCAPE(3126),
 
     // Lifecycle management errors
     DUPLICATE_PARTITION_ID(4000),
@@ -461,8 +475,8 @@
     }
 
     private static class ErrorMessageMapHolder {
-        private static final String[] enumMessages =
-                ErrorMessageUtil.defineMessageEnumOrdinalMap(values(), RESOURCE_PATH);
+        private static final String[] enumMessages = IdentifierUtil
+                .replaceIdentifiers(ErrorMessageUtil.defineMessageEnumOrdinalMap(values(), RESOURCE_PATH));
 
         private static String get(ErrorCode errorCode) {
             return enumMessages[errorCode.ordinal()];
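
Routing the message table through IdentifierUtil.replaceIdentifiers inside the holder class means the substitution runs exactly once, on the first message lookup. The initialization-on-demand holder idiom in isolation (names here are illustrative):

    public final class MessagesDemo {
        private static class Holder {
            // Initialized once, when Holder is first touched.
            static final String[] MESSAGES = loadAndRewrite();
        }

        public static String message(int ordinal) {
            return Holder.MESSAGES[ordinal];
        }

        private static String[] loadAndRewrite() {
            // Stand-in for: IdentifierUtil.replaceIdentifiers(ErrorMessageUtil.defineMessageEnumOrdinalMap(...))
            return new String[] { "example message" };
        }
    }
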
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/MetadataConstants.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/MetadataConstants.java
index 9b1d5ae..05b21af 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/MetadataConstants.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/MetadataConstants.java
@@ -38,6 +38,10 @@
     public static final String SYSTEM_DATABASE = "System";
     public static final String DEFAULT_DATABASE = "Default";
 
+    // Pre-defined creator
+    public static final String DEFAULT_CREATOR = "@sys";
+    public static final String DEFAULT_CREATOR_UUID = "97c793f3-bcbf-4595-8bf0-e9d6a5953523";
+
     // Name of the dataverse the metadata lives in.
     public static final DataverseName METADATA_DATAVERSE_NAME = DataverseName.createBuiltinDataverseName("Metadata");
 
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/ConstantUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/ConstantUtil.java
new file mode 100644
index 0000000..8bee6b2
--- /dev/null
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/ConstantUtil.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.common.utils;
+
+public class ConstantUtil {
+
+    public static final String PERCENT = "%";
+    public static final String UNDERSCORE = "_";
+}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierMappingUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierMappingUtil.java
index 8157125..a32f7f9 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierMappingUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierMappingUtil.java
@@ -21,42 +21,59 @@
 
 import static org.apache.asterix.common.utils.IdentifierUtil.DATASET;
 import static org.apache.asterix.common.utils.IdentifierUtil.DATAVERSE;
+import static org.apache.asterix.common.utils.IdentifierUtil.PRODUCT_ABBREVIATION;
+import static org.apache.asterix.common.utils.IdentifierUtil.PRODUCT_NAME;
 
 import org.apache.asterix.common.api.IIdentifierMapper;
 import org.apache.asterix.common.api.IIdentifierMapper.Modifier;
 
 public class IdentifierMappingUtil {
 
+    private static final String PLAIN_DATASET = "dataset";
     private static final String SINGULAR_DATASET = "a dataset";
     private static final String PLURAL_DATASET = "datasets";
 
+    private static final String PLAIN_DATAVERSE = "dataverse";
     private static final String SINGULAR_DATAVERSE = "a dataverse";
     private static final String PLURAL_DATAVERSE = "dataverses";
 
+    private static final String DEFAULT_PRODUCT_NAME = "Apache AsterixDB";
+    private static final String DEFAULT_PRODUCT_ABBREVIATION = "AsterixDB";
+
     private static final IIdentifierMapper DEFAULT_MAPPER = (identifier, modifier) -> {
         switch (identifier) {
             case DATASET:
                 switch (modifier) {
                     case NONE:
-                        return DATASET;
+                    case NONE_CAPITALIZED:
+                        return modifier.fixup(PLAIN_DATASET);
                     case SINGULAR:
-                        return SINGULAR_DATASET;
+                    case SINGULAR_CAPITALIZED:
+                        return modifier.fixup(SINGULAR_DATASET);
                     case PLURAL:
-                        return PLURAL_DATASET;
+                    case PLURAL_CAPITALIZED:
+                        return modifier.fixup(PLURAL_DATASET);
                     default:
                         throw new IllegalArgumentException("unknown modifier " + modifier);
                 }
             case DATAVERSE:
                 switch (modifier) {
                     case NONE:
-                        return DATAVERSE;
+                    case NONE_CAPITALIZED:
+                        return modifier.fixup(PLAIN_DATAVERSE);
                     case SINGULAR:
-                        return SINGULAR_DATAVERSE;
+                    case SINGULAR_CAPITALIZED:
+                        return modifier.fixup(SINGULAR_DATAVERSE);
                     case PLURAL:
-                        return PLURAL_DATAVERSE;
+                    case PLURAL_CAPITALIZED:
+                        return modifier.fixup(PLURAL_DATAVERSE);
                     default:
                         throw new IllegalArgumentException("unknown modifier " + modifier);
                 }
+            case PRODUCT_NAME:
+                return modifier.fixup(DEFAULT_PRODUCT_NAME);
+            case PRODUCT_ABBREVIATION:
+                return modifier.fixup(DEFAULT_PRODUCT_ABBREVIATION);
             default:
                 throw new IllegalArgumentException("unmapped identifier: " + identifier);
         }
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierUtil.java
index 88b7190..addc192 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierUtil.java
@@ -22,10 +22,22 @@
 import static org.apache.asterix.common.api.IIdentifierMapper.Modifier;
 import static org.apache.asterix.common.api.IIdentifierMapper.Modifier.NONE;
 
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.asterix.common.api.IIdentifierMapper;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
 public class IdentifierUtil {
 
-    public static final String DATASET = "dataset";
-    public static final String DATAVERSE = "dataverse";
+    private static final Logger LOGGER = LogManager.getLogger();
+    private static final Pattern MESSAGE_IDENTIFIERS = Pattern.compile("@([A-Z_]*)(:([A-Z_]*))?@");
+
+    public static final String DATASET = "DATASET";
+    public static final String DATAVERSE = "DATAVERSE";
+    public static final String PRODUCT_NAME = "PRODUCT_NAME";
+    public static final String PRODUCT_ABBREVIATION = "PRODUCT_ABBREVIATION";
 
     public static String dataset() {
         return IdentifierMappingUtil.map(DATASET, NONE);
@@ -38,4 +50,41 @@
     public static String dataverse() {
         return IdentifierMappingUtil.map(DATAVERSE, NONE);
     }
+
+    public static String productName() {
+        return IdentifierMappingUtil.map(PRODUCT_NAME, NONE);
+    }
+
+    public static String productAbbreviation() {
+        return IdentifierMappingUtil.map(PRODUCT_ABBREVIATION, NONE);
+    }
+
+    public static String replaceIdentifiers(String input) {
+        if (input == null || input.isEmpty()) {
+            return input;
+        }
+        Matcher m = MESSAGE_IDENTIFIERS.matcher(input);
+        String replacement = m.replaceAll(mr -> {
+            String identifier = mr.group(1);
+            String modifierStr = mr.group(3);
+            IIdentifierMapper.Modifier modifier;
+            if (modifierStr != null) {
+                modifier = IIdentifierMapper.Modifier.valueOf(modifierStr);
+            } else {
+                modifier = IIdentifierMapper.Modifier.NONE;
+            }
+            return IdentifierMappingUtil.map(identifier, modifier);
+        });
+        if (!input.equals(replacement)) {
+            LOGGER.debug("{} -> {}", input, replacement);
+        }
+        return replacement;
+    }
+
+    public static String[] replaceIdentifiers(String[] input) {
+        for (int i = 0; i < input.length; i++) {
+            input[i] = IdentifierUtil.replaceIdentifiers(input[i]);
+        }
+        return input;
+    }
 }
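
With the default mappings above, templates of the form @IDENTIFIER@ or @IDENTIFIER:MODIFIER@ resolve as follows (outputs derived from the constants in this change):

    String a = IdentifierUtil.replaceIdentifiers(
            "@PRODUCT_NAME@ could not find @DATASET:SINGULAR@ among the @DATASET:PLURAL@");
    // -> "Apache AsterixDB could not find a dataset among the datasets"

    String b = IdentifierUtil.replaceIdentifiers("@DATAVERSE:NONE_CAPITALIZED@ not found");
    // -> "Dataverse not found"
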
diff --git a/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties b/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
index 8e3ccff..adb26fe 100644
--- a/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
+++ b/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
@@ -306,6 +306,16 @@
 1199 = Fields '%1$s' and '%2$s' are incompatible for primary key
 1200 = Prefix should not start with "/". Prefix: '%1$s'
 1201 = Supported file format for 'delta' tables is 'parquet', but '%1$s' was provided.
+1202 = Unsupported compression scheme %1$s. Supported schemes for %2$s are %3$s
+1203 = Invalid schema provided: '%1$s'
+1204 = '%1$s' type not supported in parquet format
+1205 = Invalid Parquet writer version '%1$s'. Supported values: %2$s
+1206 = Storage units expected for the field '%1$s' (e.g., 0.1KB, 100kb, 1mb, 3MB, 8.5GB ...). Provided '%2$s'
+1207 = '%1$s' type not supported in csv format
+1208 = Invalid COPY TO CSV schema
+1209 = Maximum value allowed for '%1$s' is %2$s. Found %3$s
+1210 = Retrieving storage size is not applicable to type: %1$s
+1211 = Could not create delegation tokens
 
 # Feed Errors
 3001 = Illegal state.
@@ -427,6 +437,9 @@
 3121 = Parameter '%1$s' or '%2$s' is required if '%3$s' is provided
 3122 = Parameter '%1$s' is not allowed if '%2$s' is provided
 3123 = Type '%1$s' contains declared fields, which is not supported for 'avro' format
+3124 = '%1$s' is not a valid quote. The length of a quote should be 1
+3125 = '%1$s' is not a valid force-quote input. The length of a force-quote input should be 1 character
+3126 = '%1$s' is not a valid escape. The length of an escape should be 1
 
 # Lifecycle management errors
 4000 = Partition id %1$s for node %2$s already in use by node %3$s
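
These templates use java.util.Formatter positional arguments; rendering is a plain format call (illustrative only; the real lookup and identifier substitution go through ErrorMessageUtil):

    // Template 1209, with example arguments drawn from the Parquet max-schemas limit.
    String template = "Maximum value allowed for '%1$s' is %2$s. Found %3$s";
    System.out.println(String.format(template, "max-schemas", 10, 12));
    // -> Maximum value allowed for 'max-schemas' is 10. Found 12
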
diff --git a/asterixdb/asterix-doc/src/site/markdown/ncservice.md b/asterixdb/asterix-doc/src/site/markdown/ncservice.md
index c996e23..2352c02 100644
--- a/asterixdb/asterix-doc/src/site/markdown/ncservice.md
+++ b/asterixdb/asterix-doc/src/site/markdown/ncservice.md
@@ -344,6 +344,7 @@
 | common  | compiler.groupmemory                      | The memory budget (in bytes) for a group by operator instance in a partition | 33554432 (32 MB) |
 | common  | compiler.joinmemory                       | The memory budget (in bytes) for a join operator instance in a partition | 33554432 (32 MB) |
 | common  | compiler.parallelism                      | The degree of parallelism for query execution. Zero means to use the storage parallelism as the query execution parallelism, while other integer values dictate the number of query execution parallel partitions. The system will fall back to use the number of all available CPU cores in the cluster as the degree of parallelism if the number set by a user is too large or too small | 0 |
+| common  | compiler.orderfields                      | Enable/disable preserving the SELECT field order in the response | false |
 | common  | compiler.sortmemory                       | The memory budget (in bytes) for a sort operator instance in a partition | 33554432 (32 MB) |
 | common  | compiler.sort.parallel                    | Enable full parallel sort for queries | true |
 | common  | compiler.sort.samples                     | The number of samples taken from each partition to guide the sort operation when full parallel sort is enabled | 100 |
diff --git a/asterixdb/asterix-external-data/pom.xml b/asterixdb/asterix-external-data/pom.xml
index 771929b..27c3ac1 100644
--- a/asterixdb/asterix-external-data/pom.xml
+++ b/asterixdb/asterix-external-data/pom.xml
@@ -420,8 +420,8 @@
       <artifactId>javax.activation</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.esri.geometry</groupId>
-      <artifactId>esri-geometry-api</artifactId>
+      <groupId>org.locationtech.jts</groupId>
+      <artifactId>jts-core</artifactId>
     </dependency>
     <dependency>
       <groupId>javax.xml.bind</groupId>
@@ -449,6 +449,10 @@
     </dependency>
     <dependency>
       <groupId>software.amazon.awssdk</groupId>
+      <artifactId>sts</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>software.amazon.awssdk</groupId>
       <artifactId>s3</artifactId>
     </dependency>
     <dependency>
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
index edd42f6..a27e325 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
@@ -23,10 +23,12 @@
 import static org.apache.hyracks.api.util.ExceptionUtils.getMessageOrToString;
 
 import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.UUID;
 
 import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.CompilationException;
@@ -51,6 +53,8 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.application.ICCServiceContext;
@@ -85,12 +89,25 @@
     private String nodeName;
     private Class recordReaderClazz;
     private IExternalFilterEvaluatorFactory filterEvaluatorFactory;
+    private transient Credentials credentials;
+    private byte[] serializedCredentials;
+    private transient UserGroupInformation ugi;
 
     @Override
     public void configure(IServiceContext serviceCtx, Map<String, String> configuration,
             IWarningCollector warningCollector, IExternalFilterEvaluatorFactory filterEvaluatorFactory)
             throws AlgebricksException, HyracksDataException {
         JobConf hdfsConf = prepareHDFSConf(serviceCtx, configuration, filterEvaluatorFactory);
+        credentials = HDFSUtils.configureHadoopAuthentication(configuration, hdfsConf);
+        try {
+            if (credentials != null) {
+                serializedCredentials = HDFSUtils.serialize(credentials);
+                ugi = UserGroupInformation.createRemoteUser(UUID.randomUUID().toString());
+                ugi.addCredentials(credentials);
+            }
+        } catch (IOException ex) {
+            throw HyracksDataException.create(ex);
+        }
         configureHdfsConf(hdfsConf, configuration);
     }
 
@@ -103,13 +120,20 @@
         return HDFSUtils.configureHDFSJobConf(configuration);
     }
 
-    protected void configureHdfsConf(JobConf conf, Map<String, String> configuration) throws AlgebricksException {
+    protected void configureHdfsConf(JobConf conf, Map<String, String> configuration)
+            throws AlgebricksException, HyracksDataException {
         String formatString = configuration.get(ExternalDataConstants.KEY_FORMAT);
         try {
             confFactory = new ConfFactory(conf);
             clusterLocations = getPartitionConstraint();
             int numPartitions = clusterLocations.getLocations().length;
-            InputSplit[] configInputSplits = getInputSplits(conf, numPartitions);
+            InputSplit[] configInputSplits;
+            if (credentials != null) {
+                configInputSplits =
+                        ugi.doAs((PrivilegedExceptionAction<InputSplit[]>) () -> getInputSplits(conf, numPartitions));
+            } else {
+                configInputSplits = getInputSplits(conf, numPartitions);
+            }
             readSchedule = hdfsScheduler.getLocationConstraints(configInputSplits);
             inputSplitsFactory = new InputSplitsFactory(configInputSplits);
             read = new boolean[readSchedule.length];
@@ -125,6 +149,8 @@
                 recordReaderClazz = StreamRecordReaderProvider.getRecordReaderClazz(configuration);
                 this.recordClass = char[].class;
             }
+        } catch (InterruptedException e) {
+            throw HyracksDataException.create(e);
         } catch (IOException e) {
             throw new CompilationException(ErrorCode.EXTERNAL_SOURCE_ERROR, e, getMessageOrToString(e));
         } catch (Exception e) {
@@ -157,7 +183,7 @@
     public AsterixInputStream createInputStream(IHyracksTaskContext ctx) throws HyracksDataException {
         try {
             restoreConfig(ctx);
-            return new HDFSInputStream(read, inputSplits, readSchedule, nodeName, conf, configuration);
+            return new HDFSInputStream(read, inputSplits, readSchedule, nodeName, conf, configuration, ugi);
         } catch (Exception e) {
             throw HyracksDataException.create(e);
         }
@@ -169,6 +195,16 @@
             inputSplits = inputSplitsFactory.getSplits();
             nodeName = ctx.getJobletContext().getServiceContext().getNodeId();
             configured = true;
+            try {
+                if (serializedCredentials != null) {
+                    credentials = new Credentials();
+                    HDFSUtils.deserialize(serializedCredentials, credentials);
+                    ugi = UserGroupInformation.createRemoteUser(UUID.randomUUID().toString());
+                    ugi.addCredentials(credentials);
+                }
+            } catch (IOException ex) {
+                throw HyracksDataException.create(ex);
+            }
         }
     }
 
@@ -237,7 +273,8 @@
                  */
                 readerConf = confFactory.getConf();
             }
-            return createRecordReader(configuration, read, inputSplits, readSchedule, nodeName, readerConf, context);
+            return createRecordReader(configuration, read, inputSplits, readSchedule, nodeName, readerConf, context,
+                    ugi);
         } catch (Exception e) {
             throw HyracksDataException.create(e);
         }
@@ -262,12 +299,12 @@
 
     private static IRecordReader<?> createRecordReader(Map<String, String> configuration, boolean[] read,
             InputSplit[] inputSplits, String[] readSchedule, String nodeName, JobConf conf,
-            IExternalDataRuntimeContext context) {
-        if (configuration.get(ExternalDataConstants.KEY_INPUT_FORMAT.trim())
+            IExternalDataRuntimeContext context, UserGroupInformation ugi) {
+        if (configuration.get(ExternalDataConstants.KEY_INPUT_FORMAT).trim()
                 .equals(ExternalDataConstants.INPUT_FORMAT_PARQUET)) {
-            return new ParquetFileRecordReader<>(read, inputSplits, readSchedule, nodeName, conf, context);
+            return new ParquetFileRecordReader<>(read, inputSplits, readSchedule, nodeName, conf, context, ugi);
         } else {
-            return new HDFSRecordReader<>(read, inputSplits, readSchedule, nodeName, conf);
+            return new HDFSRecordReader<>(read, inputSplits, readSchedule, nodeName, conf, ugi);
         }
     }
 }
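
HDFSUtils.serialize and HDFSUtils.deserialize are referenced but not shown in this hunk. Since Hadoop's Credentials implements Writable, a plausible minimal round-trip looks like this (an assumption, not the shipped code):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.security.Credentials;

    final class CredentialsCodec {
        static byte[] serialize(Credentials credentials) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (DataOutputStream out = new DataOutputStream(bytes)) {
                credentials.write(out); // Credentials is a Writable
            }
            return bytes.toByteArray();
        }

        static Credentials deserialize(byte[] serialized) throws IOException {
            Credentials credentials = new Credentials();
            try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(serialized))) {
                credentials.readFields(in);
            }
            return credentials;
        }
    }
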
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/AbstractHDFSRecordReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/AbstractHDFSRecordReader.java
index 11241f1..04c80d1 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/AbstractHDFSRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/AbstractHDFSRecordReader.java
@@ -29,6 +29,7 @@
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.security.UserGroupInformation;
 
 public abstract class AbstractHDFSRecordReader<K, V> implements IRecordReader<V> {
     protected RecordReader<K, V> reader;
@@ -43,9 +44,11 @@
     protected JobConf conf;
     protected IRawRecord<V> record;
     private boolean firstInputSplit;
+    protected UserGroupInformation ugi;
 
     public AbstractHDFSRecordReader(boolean[] read, InputSplit[] inputSplits, String[] readSchedule, String nodeName,
-            JobConf conf) {
+            JobConf conf, UserGroupInformation ugi) {
+        this.ugi = ugi;
         this.read = read;
         this.inputSplits = inputSplits;
         this.readSchedule = readSchedule;
@@ -58,7 +61,8 @@
     }
 
     public AbstractHDFSRecordReader(boolean[] read, InputSplit[] inputSplits, String[] readSchedule, String nodeName,
-            IRawRecord<V> record, JobConf conf) {
+            IRawRecord<V> record, JobConf conf, UserGroupInformation ugi) {
+        this.ugi = ugi;
         this.read = read;
         this.inputSplits = inputSplits;
         this.readSchedule = readSchedule;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java
index 48b99d1..6d72fb5 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java
@@ -19,24 +19,36 @@
 package org.apache.asterix.external.input.record.reader.hdfs;
 
 import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
 
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class HDFSRecordReader<K, V extends Writable> extends AbstractHDFSRecordReader<K, V> {
 
     public HDFSRecordReader(boolean[] read, InputSplit[] inputSplits, String[] readSchedule, String nodeName,
-            JobConf conf) {
-        super(read, inputSplits, readSchedule, nodeName, conf);
+            JobConf conf, UserGroupInformation ugi) {
+        super(read, inputSplits, readSchedule, nodeName, conf, ugi);
     }
 
     @SuppressWarnings("unchecked")
     @Override
     protected RecordReader<K, V> getRecordReader(int splitIndex) throws IOException {
-        reader = (RecordReader<K, V>) inputFormat.getRecordReader(inputSplits[splitIndex], conf, Reporter.NULL);
+        if (ugi != null) {
+            try {
+                reader = ugi.doAs((PrivilegedExceptionAction<RecordReader<K, V>>) () -> (RecordReader<K, V>) inputFormat
+                        .getRecordReader(inputSplits[splitIndex], conf, Reporter.NULL));
+            } catch (InterruptedException ex) {
+                throw HyracksDataException.create(ex);
+            }
+        } else {
+            reader = (RecordReader<K, V>) inputFormat.getRecordReader(inputSplits[splitIndex], conf, Reporter.NULL);
+        }
         if (key == null) {
             key = reader.createKey();
             value = reader.createValue();
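
The guarded-doAs shape above recurs in every reader this change touches: if a UGI carrying delegation tokens was restored, the filesystem call runs under it; otherwise the call proceeds as before. Condensed into one helper (the listing call is illustrative):

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.UserGroupInformation;

    final class DoAsDemo {
        static FileStatus[] list(UserGroupInformation ugi, Configuration conf, Path path)
                throws IOException, InterruptedException {
            if (ugi != null) {
                // Run with the credentials attached to the UGI (e.g., HDFS delegation tokens).
                return ugi.doAs(
                        (PrivilegedExceptionAction<FileStatus[]>) () -> FileSystem.get(conf).listStatus(path));
            }
            return FileSystem.get(conf).listStatus(path);
        }
    }
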
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ParquetFileRecordReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ParquetFileRecordReader.java
index bb74cd6..a1f86a7 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ParquetFileRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/parquet/ParquetFileRecordReader.java
@@ -19,6 +19,7 @@
 package org.apache.asterix.external.input.record.reader.hdfs.parquet;
 
 import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
 
 import org.apache.asterix.external.api.IExternalDataRuntimeContext;
 import org.apache.asterix.external.input.record.ValueReferenceRecord;
@@ -28,6 +29,8 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.exceptions.IWarningCollector;
 import org.apache.hyracks.data.std.api.IValueReference;
 
@@ -39,8 +42,8 @@
     private final IWarningCollector warningCollector;
 
     public ParquetFileRecordReader(boolean[] read, InputSplit[] inputSplits, String[] readSchedule, String nodeName,
-            JobConf conf, IExternalDataRuntimeContext context) {
-        super(read, inputSplits, readSchedule, nodeName, new ValueReferenceRecord<>(), conf);
+            JobConf conf, IExternalDataRuntimeContext context, UserGroupInformation ugi) {
+        super(read, inputSplits, readSchedule, nodeName, new ValueReferenceRecord<>(), conf, ugi);
         this.warningCollector = context.getTaskContext().getWarningCollector();
         ((MapredParquetInputFormat) inputFormat).setValueEmbedder(context.getValueEmbedder());
     }
@@ -61,11 +64,20 @@
     @Override
     protected RecordReader<Void, V> getRecordReader(int splitIndex) throws IOException {
         try {
-            ParquetRecordReaderWrapper readerWrapper = (ParquetRecordReaderWrapper) inputFormat
-                    .getRecordReader(inputSplits[splitIndex], conf, Reporter.NULL);
+            ParquetRecordReaderWrapper readerWrapper;
+            if (ugi != null) {
+                readerWrapper = ugi
+                        .doAs((PrivilegedExceptionAction<ParquetRecordReaderWrapper>) () -> (ParquetRecordReaderWrapper) inputFormat
+                                .getRecordReader(inputSplits[splitIndex], conf, Reporter.NULL));
+            } else {
+                readerWrapper = (ParquetRecordReaderWrapper) inputFormat.getRecordReader(inputSplits[splitIndex], conf,
+                        Reporter.NULL);
+            }
             reader = (RecordReader<Void, V>) readerWrapper;
         } catch (AsterixParquetRuntimeException e) {
             throw e.getHyracksDataException();
+        } catch (InterruptedException e) {
+            throw HyracksDataException.create(e);
         }
         if (value == null) {
             value = reader.createValue();
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStream.java
index 46c2102..b81d815 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStream.java
@@ -19,6 +19,7 @@
 package org.apache.asterix.external.input.stream;
 
 import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
 import java.util.Map;
 
 import org.apache.asterix.common.exceptions.AsterixException;
@@ -31,6 +32,8 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class HDFSInputStream extends AsterixInputStream {
 
@@ -45,10 +48,13 @@
     private String nodeName;
     private JobConf conf;
     private int pos = 0;
+    private UserGroupInformation ugi;
 
     @SuppressWarnings("unchecked")
     public HDFSInputStream(boolean read[], InputSplit[] inputSplits, String[] readSchedule, String nodeName,
-            JobConf conf, Map<String, String> configuration) throws IOException, AsterixException {
+            JobConf conf, Map<String, String> configuration, UserGroupInformation ugi)
+            throws IOException, AsterixException {
+        this.ugi = ugi;
         this.read = read;
         this.inputSplits = inputSplits;
         this.readSchedule = readSchedule;
@@ -164,7 +170,19 @@
 
     @SuppressWarnings("unchecked")
     private RecordReader<Object, Text> getRecordReader(int splitIndex) throws IOException {
-        reader = (RecordReader<Object, Text>) inputFormat.getRecordReader(inputSplits[splitIndex], conf, Reporter.NULL);
+        if (ugi != null) {
+            try {
+                reader = ugi
+                        .doAs((PrivilegedExceptionAction<RecordReader<Object, Text>>) () -> (RecordReader<Object, Text>) inputFormat
+                                .getRecordReader(inputSplits[splitIndex], conf, Reporter.NULL));
+            } catch (InterruptedException ex) {
+                throw HyracksDataException.create(ex);
+            }
+        } else {
+            reader = (RecordReader<Object, Text>) inputFormat.getRecordReader(inputSplits[splitIndex], conf,
+                    Reporter.NULL);
+        }
+
         if (key == null) {
             key = reader.createKey();
             value = reader.createValue();
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
index ffe75cb..202e131 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.external.util;
 
+import java.util.List;
 import java.util.Set;
 import java.util.function.LongSupplier;
 import java.util.function.Supplier;
@@ -81,9 +82,17 @@
     public static final String KEY_EXPRESSION = "expression";
     public static final String KEY_LOCAL_SOCKET_PATH = "local-socket-path";
     public static final String KEY_FORMAT = "format";
+    public static final String KEY_SCHEMA = "schema";
+    public static final String KEY_PARQUET_ROW_GROUP_SIZE = "row-group-size";
+    public static final String PARQUET_DEFAULT_ROW_GROUP_SIZE = "10MB";
+    public static final String KEY_PARQUET_PAGE_SIZE = "page-size";
+    public static final String PARQUET_DEFAULT_PAGE_SIZE = "8KB";
     public static final String KEY_INCLUDE = "include";
     public static final String KEY_EXCLUDE = "exclude";
     public static final String KEY_QUOTE = "quote";
+    public static final String KEY_FORCE_QUOTE = "force-quote";
+    public static final String KEY_EMPTY_FIELD_AS_NULL = "empty_field_as_null";
+    public static final String KEY_RECORD_DELIMITER = "record-delimiter";
     public static final String KEY_ESCAPE = "escape";
     public static final String KEY_PARSER = "parser";
     public static final String KEY_DATASET_RECORD = "dataset-record";
@@ -151,6 +160,7 @@
     public static final String KEY_ADAPTER_NAME_AZURE_BLOB = "AZUREBLOB";
     public static final String KEY_ADAPTER_NAME_AZURE_DATA_LAKE = "AZUREDATALAKE";
     public static final String KEY_ADAPTER_NAME_GCS = "GCS";
+    public static final String KEY_ADAPTER_NAME_HDFS = "HDFS";
 
     /**
      * HDFS class names
@@ -160,12 +170,37 @@
     public static final String CLASS_NAME_PARQUET_INPUT_FORMAT =
             "org.apache.asterix.external.input.record.reader.hdfs.parquet.MapredParquetInputFormat";
     public static final String CLASS_NAME_HDFS_FILESYSTEM = "org.apache.hadoop.hdfs.DistributedFileSystem";
+    public static final String S3A_CHANGE_DETECTION_REQUIRED = "requireVersionChangeDetection";
+    public static final String S3A_CHANGE_DETECTION_REQUIRED_CONFIG_KEY = "fs.s3a.change.detection.version.required";
     /**
      * input formats aliases
      */
     public static final String INPUT_FORMAT_TEXT = "text-input-format";
     public static final String INPUT_FORMAT_SEQUENCE = "sequence-input-format";
     public static final String INPUT_FORMAT_PARQUET = "parquet-input-format";
+
+    public static final String HDFS_BLOCKSIZE = "blocksize";
+    public static final String HDFS_REPLICATION = "replication";
+    public static final String HADOOP_AUTHENTICATION = "authentication";
+    public static final String KERBEROS_PROTOCOL = "kerberos";
+    public static final String KERBEROS_REALM = "realm";
+    public static final String KERBEROS_KDC = "kdc";
+    public static final String HDFS_USE_DATANODE_HOSTNAME = "use-datanode-hostname";
+    public static final String KERBEROS_PRINCIPAL = "principal";
+    public static final String KERBEROS_PASSWORD = "password";
+
+    public static final String KEY_HDFS_BLOCKSIZE = "dfs.blocksize";
+    public static final String KEY_HDFS_REPLICATION = "dfs.replication";
+    public static final String KEY_HADOOP_AUTHENTICATION = "hadoop.security.authentication";
+    public static final String KEY_KERBEROS_CONF = "java.security.krb5.conf";
+    public static final String KEY_NAMENODE_PRINCIPAL_PATTERN = "dfs.namenode.kerberos.principal.pattern";
+    public static final String KEY_HDFS_USE_DATANODE_HOSTNAME = "dfs.client.use.datanode.hostname";
+    public static final String KERBEROS_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule";
+    public static final String KERBEROS_CONFIG_REFRESH = "refreshKrb5Config";
+    public static final String KERBEROS_CONFIG_FILE_CONTENT =
+            "[libdefaults]\n\tdefault_realm = %1$s\n\n[realms]\n\t%1$s = {\n\t\tkdc = %2$s\n\t}";
+    public static final String[] KERBEROS_CONFIG_FILE_PATTERN = { "krb5", ".conf" };
+
     /**
      * Builtin streams
      */
@@ -180,6 +215,8 @@
     public static final String HAS_HEADER = "has.header";
     public static final String TIME_TRACKING = "time.tracking";
     public static final String DEFAULT_QUOTE = "\"";
+    public static final String DEFAULT_SINGLE_QUOTE = "'";
+    public static final String NONE = "none";
     public static final String NODE_RESOLVER_FACTORY_PROPERTY = "node.Resolver";
     public static final String DEFAULT_DELIMITER = ",";
     public static final String EXTERNAL_LIBRARY_SEPARATOR = "#";
@@ -191,6 +228,7 @@
     public static final String FORMAT_ADM = "adm";
     public static final String FORMAT_AVRO = "avro";
     public static final String FORMAT_JSON_LOWER_CASE = "json";
+    public static final String FORMAT_CSV_LOWER_CASE = "csv";
     public static final String FORMAT_JSON_UPPER_CASE = "JSON";
     public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
     public static final String FORMAT_TWEET = "twitter-status";
@@ -203,6 +241,13 @@
     public static final String FORMAT_CSV = "csv";
     public static final String FORMAT_TSV = "tsv";
     public static final String FORMAT_PARQUET = "parquet";
+    public static final String PARQUET_SCHEMA_KEY = "parquet-schema";
+    public static final String PARQUET_WRITER_VERSION_KEY = "version";
+    public static final String PARQUET_WRITER_VERSION_VALUE_1 = "1";
+    public static final String PARQUET_WRITER_VERSION_VALUE_2 = "2";
+    public static final String DUMMY_DATABASE_NAME = "dbname";
+    public static final String DUMMY_TYPE_NAME = "typeName";
+    public static final String DUMMY_DATAVERSE_NAME = "a.b.c";
     public static final String FORMAT_APACHE_ICEBERG = "apache-iceberg";
     public static final String FORMAT_DELTA = "delta";
     public static final Set<String> ALL_FORMATS;
@@ -306,6 +351,8 @@
      * Compression constants
      */
     public static final String KEY_COMPRESSION_GZIP = "gzip";
+    public static final String KEY_COMPRESSION_SNAPPY = "snappy";
+    public static final String KEY_COMPRESSION_ZSTD = "zstd";
     public static final String KEY_COMPRESSION_GZIP_COMPRESSION_LEVEL = "gzipCompressionLevel";
 
     /**
@@ -318,13 +365,28 @@
     public static final int WRITER_MAX_RESULT_MINIMUM = 1000;
     public static final Set<String> WRITER_SUPPORTED_FORMATS;
     public static final Set<String> WRITER_SUPPORTED_ADAPTERS;
-    public static final Set<String> WRITER_SUPPORTED_COMPRESSION;
+    public static final Set<String> TEXTUAL_WRITER_SUPPORTED_COMPRESSION;
+    public static final Set<String> PARQUET_WRITER_SUPPORTED_COMPRESSION;
+    public static final Set<String> PARQUET_WRITER_SUPPORTED_VERSION;
+    public static final int PARQUET_DICTIONARY_PAGE_SIZE = 1048576;
+    public static final List<String> WRITER_SUPPORTED_QUOTES;
+    public static final List<ATypeTag> CSV_WRITER_SUPPORTED_DATA_TYPES =
+            List.of(ATypeTag.TINYINT, ATypeTag.SMALLINT, ATypeTag.INTEGER, ATypeTag.BIGINT, ATypeTag.UINT8,
+                    ATypeTag.UINT16, ATypeTag.UINT64, ATypeTag.FLOAT, ATypeTag.DOUBLE, ATypeTag.STRING,
+                    ATypeTag.BOOLEAN, ATypeTag.DATETIME, ATypeTag.UINT32, ATypeTag.DATE, ATypeTag.TIME);
+    public static final String PARQUET_MAX_SCHEMAS_KEY = "max-schemas";
+    public static final int PARQUET_MAX_SCHEMAS_DEFAULT_VALUE = 5;
+    public static final int PARQUET_MAX_SCHEMAS_MAX_VALUE = 10;
 
     static {
-        WRITER_SUPPORTED_FORMATS = Set.of(FORMAT_JSON_LOWER_CASE);
+        WRITER_SUPPORTED_FORMATS = Set.of(FORMAT_JSON_LOWER_CASE, FORMAT_PARQUET, FORMAT_CSV_LOWER_CASE);
         WRITER_SUPPORTED_ADAPTERS = Set.of(ALIAS_LOCALFS_ADAPTER.toLowerCase(), KEY_ADAPTER_NAME_AWS_S3.toLowerCase(),
-                KEY_ADAPTER_NAME_GCS.toLowerCase());
-        WRITER_SUPPORTED_COMPRESSION = Set.of(KEY_COMPRESSION_GZIP);
+                KEY_ADAPTER_NAME_GCS.toLowerCase(), KEY_ADAPTER_NAME_HDFS.toLowerCase());
+        TEXTUAL_WRITER_SUPPORTED_COMPRESSION = Set.of(KEY_COMPRESSION_GZIP);
+        PARQUET_WRITER_SUPPORTED_COMPRESSION =
+                Set.of(KEY_COMPRESSION_GZIP, KEY_COMPRESSION_SNAPPY, KEY_COMPRESSION_ZSTD);
+        PARQUET_WRITER_SUPPORTED_VERSION = Set.of(PARQUET_WRITER_VERSION_VALUE_1, PARQUET_WRITER_VERSION_VALUE_2);
+        WRITER_SUPPORTED_QUOTES = List.of(DEFAULT_QUOTE, DEFAULT_SINGLE_QUOTE, NONE);
     }
 
     public static class DeltaOptions {
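
KERBEROS_CONFIG_FILE_CONTENT is a Formatter template taking the realm as argument 1 (used twice) and the KDC as argument 2. Rendered with hypothetical values it yields a minimal krb5.conf:

    String krb5 = String.format(ExternalDataConstants.KERBEROS_CONFIG_FILE_CONTENT,
            "EXAMPLE.COM", "kdc.example.com:88"); // realm and KDC are example values
    // [libdefaults]
    //         default_realm = EXAMPLE.COM
    //
    // [realms]
    //         EXAMPLE.COM = {
    //                 kdc = kdc.example.com:88
    //         }
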
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
index bc5b8c3..950f3e2 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
@@ -27,6 +27,7 @@
 import static org.apache.asterix.external.util.ExternalDataConstants.KEY_INCLUDE;
 import static org.apache.asterix.external.util.ExternalDataConstants.KEY_PATH;
 import static org.apache.asterix.external.util.ExternalDataConstants.KEY_QUOTE;
+import static org.apache.asterix.external.util.ExternalDataConstants.KEY_READER;
 import static org.apache.asterix.external.util.ExternalDataConstants.KEY_RECORD_END;
 import static org.apache.asterix.external.util.ExternalDataConstants.KEY_RECORD_START;
 import static org.apache.asterix.external.util.azure.blob_storage.AzureUtils.validateAzureBlobProperties;
@@ -437,6 +438,7 @@
     public static void defaultConfiguration(Map<String, String> configuration) {
         String format = configuration.get(ExternalDataConstants.KEY_FORMAT);
         if (format != null) {
+            // TODO(utsav): revisit these CSV/TSV format defaults
             // default quote, escape character for quote and fields delimiter for csv and tsv format
             if (format.equals(ExternalDataConstants.FORMAT_CSV)) {
                 configuration.putIfAbsent(KEY_DELIMITER, ExternalDataConstants.DEFAULT_DELIMITER);
@@ -598,12 +600,22 @@
     public static void validate(Map<String, String> configuration) throws HyracksDataException {
         String format = configuration.get(ExternalDataConstants.KEY_FORMAT);
         String header = configuration.get(ExternalDataConstants.KEY_HEADER);
+        String forceQuote = configuration.get(ExternalDataConstants.KEY_FORCE_QUOTE);
+        String emptyFieldAsNull = configuration.get(ExternalDataConstants.KEY_EMPTY_FIELD_AS_NULL);
         if (format != null && isHeaderRequiredFor(format) && header == null) {
             throw new RuntimeDataException(ErrorCode.PARAMETERS_REQUIRED, ExternalDataConstants.KEY_HEADER);
         }
         if (header != null && !isBoolean(header)) {
             throw new RuntimeDataException(ErrorCode.INVALID_REQ_PARAM_VAL, ExternalDataConstants.KEY_HEADER, header);
         }
+        if (forceQuote != null && !isBoolean(forceQuote)) {
+            throw new RuntimeDataException(ErrorCode.INVALID_REQ_PARAM_VAL, ExternalDataConstants.KEY_FORCE_QUOTE,
+                    forceQuote);
+        }
+        if (emptyFieldAsNull != null && !isBoolean(emptyFieldAsNull)) {
+            throw new RuntimeDataException(ErrorCode.INVALID_REQ_PARAM_VAL,
+                    ExternalDataConstants.KEY_EMPTY_FIELD_AS_NULL, emptyFieldAsNull);
+        }
         char delimiter = validateGetDelimiter(configuration);
         validateGetQuote(configuration, delimiter);
         validateGetEscape(configuration, format);
@@ -674,6 +686,9 @@
             case ExternalDataConstants.KEY_ADAPTER_NAME_GCS:
                 validateProperties(configuration, srcLoc, collector);
                 break;
+            case ExternalDataConstants.KEY_ADAPTER_NAME_HDFS:
+                HDFSUtils.validateProperties(configuration, srcLoc, collector);
+                break;
             default:
                 // Nothing needs to be done
                 break;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
index f7638b4..2984954 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
@@ -18,8 +18,11 @@
  */
 package org.apache.asterix.external.util;
 
+import static org.apache.asterix.common.exceptions.ErrorCode.REQUIRED_PARAM_IF_PARAM_IS_PRESENT;
+import static org.apache.asterix.external.util.ExternalDataUtils.validateIncludeExclude;
 import static org.apache.asterix.om.utils.ProjectionFiltrationTypeUtil.ALL_FIELDS_TYPE;
 import static org.apache.asterix.om.utils.ProjectionFiltrationTypeUtil.EMPTY_TYPE;
+import static org.apache.hyracks.api.util.ExceptionUtils.getMessageOrToString;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -27,14 +30,29 @@
 import java.io.DataOutputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.nio.file.Files;
+import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
+
+import javax.security.auth.Subject;
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.NameCallback;
+import javax.security.auth.callback.PasswordCallback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
 
 import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
 import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.external.indexing.ExternalFile;
@@ -45,29 +63,40 @@
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.runtime.projection.ExternalDatasetProjectionFiltrationInfo;
 import org.apache.asterix.runtime.projection.FunctionCallInformation;
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.application.ICCServiceContext;
 import org.apache.hyracks.api.context.ICCContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.exceptions.IWarningCollector;
+import org.apache.hyracks.api.exceptions.SourceLocation;
 import org.apache.hyracks.api.exceptions.Warning;
 import org.apache.hyracks.api.network.INetworkSecurityManager;
 import org.apache.hyracks.hdfs.scheduler.Scheduler;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.parquet.hadoop.ParquetInputFormat;
 
 public class HDFSUtils {
 
+    private static final Logger LOGGER = LogManager.getLogger();
+
     private HDFSUtils() {
     }
 
@@ -218,9 +247,156 @@
             configureParquet(configuration, conf);
         }
 
+        if (configuration.containsKey(ExternalDataConstants.S3A_CHANGE_DETECTION_REQUIRED)) {
+            conf.set(ExternalDataConstants.S3A_CHANGE_DETECTION_REQUIRED_CONFIG_KEY,
+                    configuration.get(ExternalDataConstants.S3A_CHANGE_DETECTION_REQUIRED));
+        }
+
+        String useDatanodeHostname = configuration.get(ExternalDataConstants.HDFS_USE_DATANODE_HOSTNAME);
+        if (useDatanodeHostname != null) {
+            conf.set(ExternalDataConstants.KEY_HDFS_USE_DATANODE_HOSTNAME, useDatanodeHostname);
+        }
         return conf;
     }
 
+    public static Configuration configureHDFSwrite(Map<String, String> configuration) {
+        Configuration conf = new Configuration();
+        String url = configuration.get(ExternalDataConstants.KEY_HDFS_URL);
+        String blocksize = configuration.get(ExternalDataConstants.HDFS_BLOCKSIZE);
+        String replication = configuration.get(ExternalDataConstants.HDFS_REPLICATION);
+        String useDatanodeHostname = configuration.get(ExternalDataConstants.HDFS_USE_DATANODE_HOSTNAME);
+        if (url != null) {
+            conf.set(ExternalDataConstants.KEY_HADOOP_FILESYSTEM_URI, url);
+        }
+        if (blocksize != null) {
+            conf.set(ExternalDataConstants.KEY_HDFS_BLOCKSIZE, blocksize);
+        }
+        if (replication != null) {
+            conf.set(ExternalDataConstants.KEY_HDFS_REPLICATION, replication);
+        }
+        if (useDatanodeHostname != null) {
+            conf.set(ExternalDataConstants.KEY_HDFS_USE_DATANODE_HOSTNAME, useDatanodeHostname);
+        }
+        return conf;
+    }
+
+    public static synchronized Credentials configureHadoopAuthentication(Map<String, String> configuration,
+            Configuration conf) throws AlgebricksException {
+        UserGroupInformation.reset();
+        if (Objects.equals(configuration.get(ExternalDataConstants.HADOOP_AUTHENTICATION),
+                ExternalDataConstants.KERBEROS_PROTOCOL)) {
+            conf.set(ExternalDataConstants.KEY_HADOOP_AUTHENTICATION, ExternalDataConstants.KERBEROS_PROTOCOL);
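+            // relax the client-side principal check so any NameNode principal is accepted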
+            conf.set(ExternalDataConstants.KEY_NAMENODE_PRINCIPAL_PATTERN, "*");
+
+            String kerberosRealm = configuration.get(ExternalDataConstants.KERBEROS_REALM);
+            String kerberosKdc = configuration.get(ExternalDataConstants.KERBEROS_KDC);
+            String kerberosPrincipal = configuration.get(ExternalDataConstants.KERBEROS_PRINCIPAL);
+            String kerberosPassword = configuration.get(ExternalDataConstants.KERBEROS_PASSWORD);
+
+            javax.security.auth.login.Configuration config = new JaasConfiguration();
+            java.nio.file.Path krb5conf = null;
+            try {
+                krb5conf = createKerberosConf(kerberosRealm, kerberosKdc);
+                System.setProperty(ExternalDataConstants.KEY_KERBEROS_CONF, krb5conf.toString());
+
+                Subject subject = new Subject();
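+                // the empty entry name is fine: JaasConfiguration below answers for any name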
+                LoginContext loginContext = new LoginContext("", subject,
+                        new KerberosCallbackHandler(kerberosPrincipal, kerberosPassword), config);
+                loginContext.login();
+
+                // Hadoop libraries use rules to map a principal to its realm. The default realm is
+                // cached statically, so it must be reset in order to work with a different realm.
+                KerberosName.resetDefaultRealm();
+                UserGroupInformation.setConfiguration(conf);
+                UserGroupInformation ugi = UserGroupInformation.getUGIFromSubject(subject);
+                Credentials credentials = new Credentials();
+                ugi.doAs((PrivilegedExceptionAction<Void>) () -> {
+                    try (FileSystem fs = FileSystem.get(conf)) {
+                        fs.addDelegationTokens(ugi.getUserName(), credentials);
+                    }
+                    return null;
+                });
+                loginContext.logout();
+                return credentials;
+            } catch (InterruptedException ex) {
+                Thread.currentThread().interrupt();
+                throw CompilationException.create(ErrorCode.COULD_NOT_CREATE_TOKENS);
+            } catch (LoginException | IOException ex) {
+                throw new CompilationException(ErrorCode.EXTERNAL_SOURCE_ERROR, ex, getMessageOrToString(ex));
+            } finally {
+                System.clearProperty(ExternalDataConstants.KEY_KERBEROS_CONF);
+                UserGroupInformation.reset();
+                if (krb5conf != null) {
+                    FileUtils.deleteQuietly(krb5conf.toFile());
+                }
+            }
+        }
+        return null;
+    }
+
+    private static java.nio.file.Path createKerberosConf(String kerberosRealm, String kerberosKdc)
+            throws AlgebricksException {
+        java.nio.file.Path krb5conf;
+        try {
+            krb5conf = Files.createTempFile(ExternalDataConstants.KERBEROS_CONFIG_FILE_PATTERN[0],
+                    ExternalDataConstants.KERBEROS_CONFIG_FILE_PATTERN[1]);
+            Files.writeString(krb5conf,
+                    String.format(ExternalDataConstants.KERBEROS_CONFIG_FILE_CONTENT, kerberosRealm, kerberosKdc));
+        } catch (IOException ex) {
+            LOGGER.error("Failed to create kerberos config file", ex);
+            throw AsterixException.create(ErrorCode.COULD_NOT_CREATE_TOKENS);
+        }
+        return krb5conf;
+    }
+
+    private static class JaasConfiguration extends javax.security.auth.login.Configuration {
+        @Override
+        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
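+            // every entry name maps to the same Kerberos login module, with a forced refresh of
+            // the Kerberos config so the login re-reads the krb5.conf generated above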
+            Map<String, Object> options =
+                    Map.of(ExternalDataConstants.KERBEROS_CONFIG_REFRESH, ExternalDataConstants.TRUE);
+
+            return new AppConfigurationEntry[] { new AppConfigurationEntry(ExternalDataConstants.KERBEROS_LOGIN_MODULE,
+                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options) };
+        }
+    }
+
+    public static byte[] serialize(Writable writable) throws IOException {
+        try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
+                DataOutputStream dos = new DataOutputStream(bos)) {
+            writable.write(dos);
+            return bos.toByteArray();
+        }
+    }
+
+    public static void deserialize(byte[] data, Writable writable) throws IOException {
+        try (DataInputStream din = new DataInputStream(new ByteArrayInputStream(data))) {
+            writable.readFields(din);
+        }
+    }
+
+    private static class KerberosCallbackHandler implements CallbackHandler {
+        private final String principal;
+        private final String password;
+
+        public KerberosCallbackHandler(String principal, String password) {
+            this.principal = principal;
+            this.password = password;
+        }
+
+        @Override
+        public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
+            for (Callback callback : callbacks) {
+                if (callback instanceof NameCallback) {
+                    ((NameCallback) callback).setName(principal);
+                } else if (callback instanceof PasswordCallback) {
+                    ((PasswordCallback) callback).setPassword(password.toCharArray());
+                } else {
+                    throw new UnsupportedCallbackException(callback);
+                }
+            }
+        }
+    }
+
     private static void configureParquet(Map<String, String> configuration, JobConf conf) {
         //Parquet configurations
         conf.set(ParquetInputFormat.READ_SUPPORT_CLASS, ParquetReadSupport.class.getName());
@@ -351,4 +527,41 @@
     public static boolean isEmpty(JobConf job) {
         return job.get(ExternalDataConstants.KEY_HADOOP_INPUT_DIR, "").isEmpty();
     }
+
+    public static void validateProperties(Map<String, String> configuration, SourceLocation srcLoc,
+            IWarningCollector collector) throws CompilationException {
+        if (configuration.get(ExternalDataConstants.KEY_FORMAT) == null) {
+            throw new CompilationException(ErrorCode.PARAMETERS_REQUIRED, srcLoc, ExternalDataConstants.KEY_FORMAT);
+        }
+        if (configuration.get(ExternalDataConstants.KEY_PATH) == null) {
+            throw new CompilationException(ErrorCode.PARAMETERS_REQUIRED, srcLoc, ExternalDataConstants.KEY_PATH);
+        }
+
+        if (Objects.equals(configuration.get(ExternalDataConstants.HADOOP_AUTHENTICATION),
+                ExternalDataConstants.KERBEROS_PROTOCOL)) {
+            String kerberosRealm = configuration.get(ExternalDataConstants.KERBEROS_REALM);
+            String kerberosKdc = configuration.get(ExternalDataConstants.KERBEROS_KDC);
+            String kerberosPrincipal = configuration.get(ExternalDataConstants.KERBEROS_PRINCIPAL);
+            String kerberosPassword = configuration.get(ExternalDataConstants.KERBEROS_PASSWORD);
+
+            if (kerberosRealm == null) {
+                throw CompilationException.create(REQUIRED_PARAM_IF_PARAM_IS_PRESENT,
+                        ExternalDataConstants.KERBEROS_REALM, ExternalDataConstants.HADOOP_AUTHENTICATION);
+            }
+            if (kerberosKdc == null) {
+                throw CompilationException.create(REQUIRED_PARAM_IF_PARAM_IS_PRESENT,
+                        ExternalDataConstants.KERBEROS_KDC, ExternalDataConstants.HADOOP_AUTHENTICATION);
+            }
+            if (kerberosPrincipal == null) {
+                throw CompilationException.create(REQUIRED_PARAM_IF_PARAM_IS_PRESENT,
+                        ExternalDataConstants.KERBEROS_PRINCIPAL, ExternalDataConstants.HADOOP_AUTHENTICATION);
+            }
+            if (kerberosPassword == null) {
+                throw CompilationException.create(REQUIRED_PARAM_IF_PARAM_IS_PRESENT,
+                        ExternalDataConstants.KERBEROS_PASSWORD, ExternalDataConstants.HADOOP_AUTHENTICATION);
+            }
+        }
+
+        validateIncludeExclude(configuration);
+    }
 }
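
The Kerberos flow added above reduces to three steps: write a temporary krb5.conf, run a JAAS
login through a name/password callback handler, and collect HDFS delegation tokens while acting
as the logged-in subject. A minimal, self-contained sketch of the login step follows; the
"SampleLogin" entry name, principal, and password are placeholders, and unlike the patch (which
supplies its own JaasConfiguration) it relies on a jaas.conf being installed:

    import javax.security.auth.Subject;
    import javax.security.auth.callback.Callback;
    import javax.security.auth.callback.CallbackHandler;
    import javax.security.auth.callback.NameCallback;
    import javax.security.auth.callback.PasswordCallback;
    import javax.security.auth.callback.UnsupportedCallbackException;
    import javax.security.auth.login.LoginContext;
    import javax.security.auth.login.LoginException;

    public final class KerberosLoginSketch {
        public static void main(String[] args) throws LoginException {
            // point the JVM at the (generated) Kerberos configuration
            System.setProperty("java.security.krb5.conf", "/tmp/krb5.conf");
            CallbackHandler handler = callbacks -> {
                for (Callback cb : callbacks) {
                    if (cb instanceof NameCallback) {
                        ((NameCallback) cb).setName("user@EXAMPLE.COM"); // placeholder principal
                    } else if (cb instanceof PasswordCallback) {
                        ((PasswordCallback) cb).setPassword("secret".toCharArray()); // placeholder
                    } else {
                        throw new UnsupportedCallbackException(cb);
                    }
                }
            };
            LoginContext login = new LoginContext("SampleLogin", new Subject(), handler, null);
            login.login(); // authenticates against the KDC named in krb5.conf
            try {
                // Subject.doAs / UserGroupInformation work would go here
            } finally {
                login.logout();
            }
        }
    }
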
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/WriterValidationUtil.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/WriterValidationUtil.java
index 843600e..380a9a8 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/WriterValidationUtil.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/WriterValidationUtil.java
@@ -19,8 +19,18 @@
 package org.apache.asterix.external.util;
 
 import static org.apache.asterix.common.exceptions.ErrorCode.INVALID_REQ_PARAM_VAL;
+import static org.apache.asterix.common.exceptions.ErrorCode.MAXIMUM_VALUE_ALLOWED_FOR_PARAM;
 import static org.apache.asterix.common.exceptions.ErrorCode.MINIMUM_VALUE_ALLOWED_FOR_PARAM;
+import static org.apache.asterix.common.exceptions.ErrorCode.PARAMETERS_REQUIRED;
+import static org.apache.asterix.external.util.ExternalDataConstants.FORMAT_CSV;
+import static org.apache.asterix.external.util.ExternalDataConstants.FORMAT_JSON_LOWER_CASE;
+import static org.apache.asterix.external.util.ExternalDataConstants.FORMAT_PARQUET;
+import static org.apache.asterix.external.util.ExternalDataConstants.KEY_PARQUET_PAGE_SIZE;
+import static org.apache.asterix.external.util.ExternalDataConstants.KEY_PARQUET_ROW_GROUP_SIZE;
 import static org.apache.asterix.external.util.ExternalDataConstants.KEY_WRITER_MAX_RESULT;
+import static org.apache.asterix.external.util.ExternalDataConstants.PARQUET_MAX_SCHEMAS_KEY;
+import static org.apache.asterix.external.util.ExternalDataConstants.PARQUET_MAX_SCHEMAS_MAX_VALUE;
+import static org.apache.asterix.external.util.ExternalDataConstants.PARQUET_WRITER_VERSION_KEY;
 import static org.apache.asterix.external.util.ExternalDataConstants.WRITER_MAX_RESULT_MINIMUM;
 
 import java.util.List;
@@ -31,6 +41,7 @@
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.SourceLocation;
+import org.apache.hyracks.util.StorageUtil;
 
 public class WriterValidationUtil {
 
@@ -41,10 +52,18 @@
             Map<String, String> configuration, SourceLocation sourceLocation) throws CompilationException {
         validateAdapter(adapter, supportedAdapters, sourceLocation);
         validateFormat(configuration, sourceLocation);
-        validateCompression(configuration, sourceLocation);
         validateMaxResult(configuration, sourceLocation);
     }
 
+    private static void validateQuote(Map<String, String> configuration, SourceLocation sourceLocation)
+            throws CompilationException {
+        String quote = configuration.get(ExternalDataConstants.KEY_QUOTE);
+        if (quote != null && !ExternalDataConstants.WRITER_SUPPORTED_QUOTES.contains(quote.toLowerCase())) {
+            throw CompilationException.create(ErrorCode.INVALID_QUOTE, sourceLocation, quote,
+                    ExternalDataConstants.WRITER_SUPPORTED_QUOTES.toString());
+        }
+    }
+
     private static void validateAdapter(String adapter, Set<String> supportedAdapters, SourceLocation sourceLocation)
             throws CompilationException {
         checkSupported(ExternalDataConstants.KEY_EXTERNAL_SOURCE_TYPE, adapter, supportedAdapters,
@@ -56,14 +75,87 @@
         String format = configuration.get(ExternalDataConstants.KEY_FORMAT);
         checkSupported(ExternalDataConstants.KEY_FORMAT, format, ExternalDataConstants.WRITER_SUPPORTED_FORMATS,
                 ErrorCode.UNSUPPORTED_WRITING_FORMAT, sourceLocation, false);
+        switch (format.toLowerCase()) {
+            case FORMAT_JSON_LOWER_CASE:
+                validateJSON(configuration, sourceLocation);
+                break;
+            case FORMAT_PARQUET:
+                validateParquet(configuration, sourceLocation);
+                break;
+            case FORMAT_CSV:
+                validateCSV(configuration, sourceLocation);
+                break;
+        }
     }
 
-    private static void validateCompression(Map<String, String> configuration, SourceLocation sourceLocation)
+    private static void validateParquet(Map<String, String> configuration, SourceLocation sourceLocation)
+            throws CompilationException {
+        validateParquetCompression(configuration, sourceLocation);
+        validateParquetRowGroupSize(configuration);
+        validateParquetPageSize(configuration);
+        validateVersion(configuration, sourceLocation);
+        validateMaxParquetSchemas(configuration, sourceLocation);
+    }
+
+    private static void validateVersion(Map<String, String> configuration, SourceLocation sourceLocation)
+            throws CompilationException {
+        String version = configuration.get(PARQUET_WRITER_VERSION_KEY);
+        checkSupported(PARQUET_WRITER_VERSION_KEY, version, ExternalDataConstants.PARQUET_WRITER_SUPPORTED_VERSION,
+                ErrorCode.INVALID_PARQUET_WRITER_VERSION, sourceLocation, true);
+    }
+
+    private static void validateParquetRowGroupSize(Map<String, String> configuration) throws CompilationException {
+        String rowGroupSize = configuration.get(KEY_PARQUET_ROW_GROUP_SIZE);
+        if (rowGroupSize == null) {
+            return;
+        }
+        try {
+            StorageUtil.getByteValue(rowGroupSize);
+        } catch (IllegalArgumentException e) {
+            throw CompilationException.create(ErrorCode.ILLEGAL_SIZE_PROVIDED, KEY_PARQUET_ROW_GROUP_SIZE,
+                    rowGroupSize);
+        }
+    }
+
+    private static void validateParquetPageSize(Map<String, String> configuration) throws CompilationException {
+        String pageSize = configuration.get(KEY_PARQUET_PAGE_SIZE);
+        if (pageSize == null) {
+            return;
+        }
+        try {
+            StorageUtil.getByteValue(pageSize);
+        } catch (IllegalArgumentException e) {
+            throw CompilationException.create(ErrorCode.ILLEGAL_SIZE_PROVIDED, KEY_PARQUET_PAGE_SIZE, pageSize);
+        }
+    }
+
+    private static void validateJSON(Map<String, String> configuration, SourceLocation sourceLocation)
+            throws CompilationException {
+        validateTextualCompression(configuration, sourceLocation);
+    }
+
+    private static void validateCSV(Map<String, String> configuration, SourceLocation sourceLocation)
+            throws CompilationException {
+        validateTextualCompression(configuration, sourceLocation);
+        validateDelimiter(configuration, sourceLocation);
+        validateRecordDelimiter(configuration, sourceLocation);
+        validateQuote(configuration, sourceLocation);
+        validateEscape(configuration, sourceLocation);
+    }
+
+    private static void validateParquetCompression(Map<String, String> configuration, SourceLocation sourceLocation)
             throws CompilationException {
         String compression = configuration.get(ExternalDataConstants.KEY_WRITER_COMPRESSION);
-        checkSupported(ExternalDataConstants.KEY_WRITER_COMPRESSION, compression,
-                ExternalDataConstants.WRITER_SUPPORTED_COMPRESSION, ErrorCode.UNKNOWN_COMPRESSION_SCHEME,
-                sourceLocation, true);
+        checkCompressionSupported(ExternalDataConstants.KEY_WRITER_COMPRESSION, compression,
+                ExternalDataConstants.PARQUET_WRITER_SUPPORTED_COMPRESSION,
+                ErrorCode.UNSUPPORTED_WRITER_COMPRESSION_SCHEME, sourceLocation, FORMAT_PARQUET, true);
+    }
+
+    private static void validateTextualCompression(Map<String, String> configuration, SourceLocation sourceLocation)
+            throws CompilationException {
+        String compression = configuration.get(ExternalDataConstants.KEY_WRITER_COMPRESSION);
+        checkCompressionSupported(ExternalDataConstants.KEY_WRITER_COMPRESSION, compression,
+                ExternalDataConstants.TEXTUAL_WRITER_SUPPORTED_COMPRESSION,
+                ErrorCode.UNSUPPORTED_WRITER_COMPRESSION_SCHEME, sourceLocation,
+                configuration.get(ExternalDataConstants.KEY_FORMAT), true);
         if (ExternalDataUtils.isGzipCompression(compression)) {
             validateGzipCompressionLevel(configuration, sourceLocation);
         }
@@ -87,6 +179,24 @@
         }
     }
 
+    private static void validateMaxParquetSchemas(Map<String, String> configuration, SourceLocation sourceLocation)
+            throws CompilationException {
+        String maxSchemas = configuration.get(PARQUET_MAX_SCHEMAS_KEY);
+        if (maxSchemas == null) {
+            return;
+        }
+
+        try {
+            int value = Integer.parseInt(maxSchemas);
+            if (value > PARQUET_MAX_SCHEMAS_MAX_VALUE) {
+                throw new CompilationException(MAXIMUM_VALUE_ALLOWED_FOR_PARAM, PARQUET_MAX_SCHEMAS_KEY,
+                        PARQUET_MAX_SCHEMAS_MAX_VALUE, value);
+            }
+        } catch (NumberFormatException e) {
+            throw CompilationException.create(ErrorCode.INTEGER_VALUE_EXPECTED, sourceLocation, maxSchemas);
+        }
+        }
+    }
+
     private static void checkSupported(String paramKey, String value, Set<String> supportedSet, ErrorCode errorCode,
             SourceLocation sourceLocation, boolean optional) throws CompilationException {
         if (optional && value == null) {
@@ -94,7 +204,7 @@
         }
 
         if (value == null) {
-            throw new CompilationException(ErrorCode.PARAMETERS_REQUIRED, sourceLocation, paramKey);
+            throw new CompilationException(PARAMETERS_REQUIRED, sourceLocation, paramKey);
         }
 
         String normalizedValue = value.toLowerCase();
@@ -121,4 +231,49 @@
         }
     }
 
+    private static void checkCompressionSupported(String paramKey, String value, Set<String> supportedSet,
+            ErrorCode errorCode, SourceLocation sourceLocation, String format, boolean optional)
+            throws CompilationException {
+        if (optional && value == null) {
+            return;
+        }
+
+        if (value == null) {
+            throw new CompilationException(PARAMETERS_REQUIRED, sourceLocation, paramKey);
+        }
+
+        String normalizedValue = value.toLowerCase();
+        if (!supportedSet.contains(normalizedValue)) {
+            List<String> sorted = supportedSet.stream().sorted().collect(Collectors.toList());
+            throw CompilationException.create(errorCode, sourceLocation, value, format, sorted.toString());
+        }
+    }
+
+    private static void validateDelimiter(Map<String, String> configuration, SourceLocation sourceLocation)
+            throws CompilationException {
+        // Will this affect backward compatibility?
+        String delimiter = configuration.get(ExternalDataConstants.KEY_DELIMITER);
+        unitByteCondition(delimiter, sourceLocation, ErrorCode.INVALID_DELIMITER);
+    }
+
+    private static void validateEscape(Map<String, String> configuration, SourceLocation sourceLocation)
+            throws CompilationException {
+        // Will this affect backward compatibility?
+        String escape = configuration.get(ExternalDataConstants.KEY_ESCAPE);
+        unitByteCondition(escape, sourceLocation, ErrorCode.INVALID_ESCAPE);
+    }
+
+    private static void validateRecordDelimiter(Map<String, String> configuration, SourceLocation sourceLocation)
+            throws CompilationException {
+        String recordDel = configuration.get(ExternalDataConstants.KEY_RECORD_DELIMITER);
+        unitByteCondition(recordDel, sourceLocation, ErrorCode.INVALID_FORCE_QUOTE);
+    }
+
+    private static void unitByteCondition(String param, SourceLocation sourceLocation, ErrorCode errorCode)
+            throws CompilationException {
+        // the configured character must encode to exactly one byte
+        if (param != null && !param.isEmpty() && param.getBytes().length != 1) {
+            throw CompilationException.create(errorCode, sourceLocation, param);
+        }
+    }
+
 }
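
Most of the validators added above share one shape: the option is optional, it is parsed, and a
compilation error is raised when it is malformed or out of range. The single-byte rule behind
unitByteCondition can be shown standalone; the class and method names below are illustrative
and not part of the patch:

    import java.nio.charset.StandardCharsets;

    public final class DelimiterCheckSketch {
        // the CSV delimiter/escape/record-delimiter options must encode to exactly one byte
        static void requireSingleByte(String key, String value) {
            if (value != null && !value.isEmpty()
                    && value.getBytes(StandardCharsets.UTF_8).length != 1) {
                throw new IllegalArgumentException(key + " must be a single-byte character: " + value);
            }
        }

        public static void main(String[] args) {
            requireSingleByte("delimiter", ",");  // ok: one byte
            requireSingleByte("delimiter", null); // ok: option not set
            requireSingleByte("delimiter", "é");  // rejected: two bytes in UTF-8
        }
    }
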
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/aws/s3/S3Constants.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/aws/s3/S3Constants.java
index a62b346..126c868 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/aws/s3/S3Constants.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/aws/s3/S3Constants.java
@@ -28,6 +28,8 @@
     public static final String ACCESS_KEY_ID_FIELD_NAME = "accessKeyId";
     public static final String SECRET_ACCESS_KEY_FIELD_NAME = "secretAccessKey";
     public static final String SESSION_TOKEN_FIELD_NAME = "sessionToken";
+    public static final String ROLE_ARN_FIELD_NAME = "roleArn";
+    public static final String EXTERNAL_ID_FIELD_NAME = "externalId";
     public static final String SERVICE_END_POINT_FIELD_NAME = "serviceEndpoint";
 
     // AWS S3 specific error codes
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/aws/s3/S3Utils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/aws/s3/S3Utils.java
index 891d7f3..3cfccb4 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/aws/s3/S3Utils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/aws/s3/S3Utils.java
@@ -30,6 +30,7 @@
 import static org.apache.asterix.external.util.aws.s3.S3Constants.ERROR_INTERNAL_ERROR;
 import static org.apache.asterix.external.util.aws.s3.S3Constants.ERROR_METHOD_NOT_IMPLEMENTED;
 import static org.apache.asterix.external.util.aws.s3.S3Constants.ERROR_SLOW_DOWN;
+import static org.apache.asterix.external.util.aws.s3.S3Constants.EXTERNAL_ID_FIELD_NAME;
 import static org.apache.asterix.external.util.aws.s3.S3Constants.HADOOP_ACCESS_KEY_ID;
 import static org.apache.asterix.external.util.aws.s3.S3Constants.HADOOP_ANONYMOUS_ACCESS;
 import static org.apache.asterix.external.util.aws.s3.S3Constants.HADOOP_CREDENTIAL_PROVIDER_KEY;
@@ -42,6 +43,7 @@
 import static org.apache.asterix.external.util.aws.s3.S3Constants.HADOOP_TEMP_ACCESS;
 import static org.apache.asterix.external.util.aws.s3.S3Constants.INSTANCE_PROFILE_FIELD_NAME;
 import static org.apache.asterix.external.util.aws.s3.S3Constants.REGION_FIELD_NAME;
+import static org.apache.asterix.external.util.aws.s3.S3Constants.ROLE_ARN_FIELD_NAME;
 import static org.apache.asterix.external.util.aws.s3.S3Constants.SECRET_ACCESS_KEY_FIELD_NAME;
 import static org.apache.asterix.external.util.aws.s3.S3Constants.SERVICE_END_POINT_FIELD_NAME;
 import static org.apache.asterix.external.util.aws.s3.S3Constants.SESSION_TOKEN_FIELD_NAME;
@@ -54,6 +56,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
+import java.util.UUID;
 import java.util.function.BiPredicate;
 import java.util.regex.Matcher;
 
@@ -93,6 +96,10 @@
 import software.amazon.awssdk.services.s3.model.S3Object;
 import software.amazon.awssdk.services.s3.model.S3Response;
 import software.amazon.awssdk.services.s3.paginators.ListObjectsV2Iterable;
+import software.amazon.awssdk.services.sts.StsClient;
+import software.amazon.awssdk.services.sts.model.AssumeRoleRequest;
+import software.amazon.awssdk.services.sts.model.AssumeRoleResponse;
+import software.amazon.awssdk.services.sts.model.Credentials;
 
 public class S3Utils {
     private S3Utils() {
@@ -111,31 +118,16 @@
      * @throws CompilationException CompilationException
      */
     public static S3Client buildAwsS3Client(Map<String, String> configuration) throws CompilationException {
-        // TODO(Hussain): Need to ensure that all required parameters are present in a previous step
-        String instanceProfile = configuration.get(INSTANCE_PROFILE_FIELD_NAME);
-        String accessKeyId = configuration.get(ACCESS_KEY_ID_FIELD_NAME);
-        String secretAccessKey = configuration.get(SECRET_ACCESS_KEY_FIELD_NAME);
-        String sessionToken = configuration.get(SESSION_TOKEN_FIELD_NAME);
         String regionId = configuration.get(REGION_FIELD_NAME);
         String serviceEndpoint = configuration.get(SERVICE_END_POINT_FIELD_NAME);
 
+        Region region = validateAndGetRegion(regionId);
+        AwsCredentialsProvider credentialsProvider = buildCredentialsProvider(configuration);
+
         S3ClientBuilder builder = S3Client.builder();
-
-        // Credentials
-        AwsCredentialsProvider credentialsProvider =
-                buildCredentialsProvider(instanceProfile, accessKeyId, secretAccessKey, sessionToken);
-
+        builder.region(region);
         builder.credentialsProvider(credentialsProvider);
 
-        // Validate the region
-        List<Region> regions = S3Client.serviceMetadata().regions();
-        Optional<Region> selectedRegion = regions.stream().filter(region -> region.id().equals(regionId)).findFirst();
-
-        if (selectedRegion.isEmpty()) {
-            throw new CompilationException(S3_REGION_NOT_SUPPORTED, regionId);
-        }
-        builder.region(selectedRegion.get());
-
         // Validate the service endpoint if present
         if (serviceEndpoint != null) {
             try {
@@ -154,61 +146,32 @@
         return builder.build();
     }
 
-    public static AwsCredentialsProvider buildCredentialsProvider(String instanceProfile, String accessKeyId,
-            String secretAccessKey, String sessionToken) throws CompilationException {
+    public static AwsCredentialsProvider buildCredentialsProvider(Map<String, String> configuration)
+            throws CompilationException {
+        String arnRole = configuration.get(ROLE_ARN_FIELD_NAME);
+        String externalId = configuration.get(EXTERNAL_ID_FIELD_NAME);
+        String instanceProfile = configuration.get(INSTANCE_PROFILE_FIELD_NAME);
+        String accessKeyId = configuration.get(ACCESS_KEY_ID_FIELD_NAME);
+        String secretAccessKey = configuration.get(SECRET_ACCESS_KEY_FIELD_NAME);
 
-        // Credentials
-        AwsCredentialsProvider credentialsProvider;
-
-        // nothing provided, anonymous authentication
-        if (instanceProfile == null && accessKeyId == null && secretAccessKey == null && sessionToken == null) {
-            credentialsProvider = AnonymousCredentialsProvider.create();
+        if (noAuth(configuration)) {
+            return AnonymousCredentialsProvider.create();
+        } else if (arnRole != null) {
+            // TODO: validate the authentication and reuse existing credentials when present; otherwise, assume the role
+            return validateAndGetTrustAccountAuthentication(configuration);
         } else if (instanceProfile != null) {
-
-            // only "true" value is allowed
-            if (!instanceProfile.equalsIgnoreCase("true")) {
-                throw new CompilationException(INVALID_PARAM_VALUE_ALLOWED_VALUE, INSTANCE_PROFILE_FIELD_NAME, "true");
-            }
-
-            // no other authentication parameters are allowed
-            if (accessKeyId != null) {
-                throw new CompilationException(PARAM_NOT_ALLOWED_IF_PARAM_IS_PRESENT, ACCESS_KEY_ID_FIELD_NAME,
-                        INSTANCE_PROFILE_FIELD_NAME);
-            }
-            if (secretAccessKey != null) {
-                throw new CompilationException(PARAM_NOT_ALLOWED_IF_PARAM_IS_PRESENT, SECRET_ACCESS_KEY_FIELD_NAME,
-                        INSTANCE_PROFILE_FIELD_NAME);
-            }
-            if (sessionToken != null) {
-                throw new CompilationException(PARAM_NOT_ALLOWED_IF_PARAM_IS_PRESENT, SESSION_TOKEN_FIELD_NAME,
-                        INSTANCE_PROFILE_FIELD_NAME);
-            }
-            credentialsProvider = InstanceProfileCredentialsProvider.create();
+            return validateAndGetInstanceProfileAuthentication(configuration);
         } else if (accessKeyId != null || secretAccessKey != null) {
-            // accessKeyId authentication
-            if (accessKeyId == null) {
-                throw new CompilationException(REQUIRED_PARAM_IF_PARAM_IS_PRESENT, ACCESS_KEY_ID_FIELD_NAME,
-                        SECRET_ACCESS_KEY_FIELD_NAME);
-            }
-            if (secretAccessKey == null) {
-                throw new CompilationException(REQUIRED_PARAM_IF_PARAM_IS_PRESENT, SECRET_ACCESS_KEY_FIELD_NAME,
-                        ACCESS_KEY_ID_FIELD_NAME);
-            }
-
-            // use session token if provided
-            if (sessionToken != null) {
-                credentialsProvider = StaticCredentialsProvider
-                        .create(AwsSessionCredentials.create(accessKeyId, secretAccessKey, sessionToken));
-            } else {
-                credentialsProvider =
-                        StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKeyId, secretAccessKey));
-            }
+            return validateAndGetAccessKeysAuthentication(configuration);
         } else {
-            // if only session token is provided, accessKeyId is required
-            throw new CompilationException(REQUIRED_PARAM_IF_PARAM_IS_PRESENT, ACCESS_KEY_ID_FIELD_NAME,
-                    SESSION_TOKEN_FIELD_NAME);
+            if (externalId != null) {
+                throw new CompilationException(REQUIRED_PARAM_IF_PARAM_IS_PRESENT, ROLE_ARN_FIELD_NAME,
+                        EXTERNAL_ID_FIELD_NAME);
+            } else {
+                throw new CompilationException(REQUIRED_PARAM_IF_PARAM_IS_PRESENT, ACCESS_KEY_ID_FIELD_NAME,
+                        SESSION_TOKEN_FIELD_NAME);
+            }
         }
-        return credentialsProvider;
     }
 
     /**
@@ -282,10 +245,22 @@
             throw new CompilationException(ErrorCode.PARAMETERS_REQUIRED, srcLoc, ExternalDataConstants.KEY_FORMAT);
         }
 
-        // Both parameters should be passed, or neither should be passed (for anonymous/no auth)
+        String arnRole = configuration.get(ROLE_ARN_FIELD_NAME);
+        String externalId = configuration.get(EXTERNAL_ID_FIELD_NAME);
         String accessKeyId = configuration.get(ACCESS_KEY_ID_FIELD_NAME);
         String secretAccessKey = configuration.get(SECRET_ACCESS_KEY_FIELD_NAME);
-        if (accessKeyId == null || secretAccessKey == null) {
+
+        if (arnRole != null) {
+            String notAllowed = getNonNull(configuration, ACCESS_KEY_ID_FIELD_NAME, SECRET_ACCESS_KEY_FIELD_NAME,
+                    SESSION_TOKEN_FIELD_NAME);
+            if (notAllowed != null) {
+                throw new CompilationException(PARAM_NOT_ALLOWED_IF_PARAM_IS_PRESENT, notAllowed,
+                        ROLE_ARN_FIELD_NAME);
+            }
+        } else if (externalId != null) {
+            throw new CompilationException(REQUIRED_PARAM_IF_PARAM_IS_PRESENT, ROLE_ARN_FIELD_NAME,
+                    EXTERNAL_ID_FIELD_NAME);
+        } else if (accessKeyId == null || secretAccessKey == null) {
             // If one is passed, the other is required
             if (accessKeyId != null) {
                 throw new CompilationException(REQUIRED_PARAM_IF_PARAM_IS_PRESENT, SECRET_ACCESS_KEY_FIELD_NAME,
@@ -528,7 +503,7 @@
     }
 
     public static Map<String, List<String>> S3ObjectsOfSingleDepth(Map<String, String> configuration, String container,
-            String prefix) throws CompilationException, HyracksDataException {
+            String prefix) throws CompilationException {
         // create s3 client
         S3Client s3Client = buildAwsS3Client(configuration);
         // fetch all the s3 objects
@@ -543,7 +518,7 @@
      * @param prefix                definition prefix
      */
     private static Map<String, List<String>> listS3ObjectsOfSingleDepth(S3Client s3Client, String container,
-            String prefix) throws HyracksDataException {
+            String prefix) {
         Map<String, List<String>> allObjects = new HashMap<>();
         ListObjectsV2Iterable listObjectsInterable;
         ListObjectsV2Request.Builder listObjectsBuilder =
@@ -580,4 +555,116 @@
         allObjects.put("folders", folders);
         return allObjects;
     }
+
+    public static Region validateAndGetRegion(String regionId) throws CompilationException {
+        List<Region> regions = S3Client.serviceMetadata().regions();
+        Optional<Region> selectedRegion = regions.stream().filter(region -> region.id().equals(regionId)).findFirst();
+
+        if (selectedRegion.isEmpty()) {
+            throw new CompilationException(S3_REGION_NOT_SUPPORTED, regionId);
+        }
+        return selectedRegion.get();
+    }
+
+    // TODO(htowaileb): Currently, trust-account is always assuming we have instance profile setup in place
+    private static AwsCredentialsProvider validateAndGetTrustAccountAuthentication(Map<String, String> configuration)
+            throws CompilationException {
+        String notAllowed = getNonNull(configuration, ACCESS_KEY_ID_FIELD_NAME, SECRET_ACCESS_KEY_FIELD_NAME,
+                SESSION_TOKEN_FIELD_NAME);
+        if (notAllowed != null) {
+            throw new CompilationException(PARAM_NOT_ALLOWED_IF_PARAM_IS_PRESENT, notAllowed, ROLE_ARN_FIELD_NAME);
+        }
+
+        String regionId = configuration.get(REGION_FIELD_NAME);
+        String arnRole = configuration.get(ROLE_ARN_FIELD_NAME);
+        String externalId = configuration.get(EXTERNAL_ID_FIELD_NAME);
+        Region region = validateAndGetRegion(regionId);
+
+        AssumeRoleRequest.Builder builder = AssumeRoleRequest.builder();
+        builder.roleArn(arnRole);
+        builder.roleSessionName(UUID.randomUUID().toString());
+        builder.durationSeconds(900); // minimum assume-role duration is 900 seconds (15 minutes); could be made configurable
+        if (externalId != null) {
+            builder.externalId(externalId);
+        }
+        AssumeRoleRequest request = builder.build();
+        AwsCredentialsProvider credentialsProvider = validateAndGetInstanceProfileAuthentication(configuration);
+
+        // TODO(htowaileb): We shouldn't assume the role on every request; instead, we should store the received
+        // temporary credentials and refresh them when they expire.
+        // assume the role from the provided ARN
+        try (StsClient stsClient =
+                StsClient.builder().region(region).credentialsProvider(credentialsProvider).build()) {
+            AssumeRoleResponse response = stsClient.assumeRole(request);
+            Credentials credentials = response.credentials();
+            return StaticCredentialsProvider.create(AwsSessionCredentials.create(credentials.accessKeyId(),
+                    credentials.secretAccessKey(), credentials.sessionToken()));
+        } catch (SdkException ex) {
+            throw new CompilationException(ErrorCode.EXTERNAL_SOURCE_ERROR, ex, getMessageOrToString(ex));
+        }
+    }
+
+    private static AwsCredentialsProvider validateAndGetInstanceProfileAuthentication(Map<String, String> configuration)
+            throws CompilationException {
+        String instanceProfile = configuration.get(INSTANCE_PROFILE_FIELD_NAME);
+
+        // only "true" value is allowed
+        if (!"true".equalsIgnoreCase(instanceProfile)) {
+            throw new CompilationException(INVALID_PARAM_VALUE_ALLOWED_VALUE, INSTANCE_PROFILE_FIELD_NAME, "true");
+        }
+
+        String notAllowed = getNonNull(configuration, ACCESS_KEY_ID_FIELD_NAME, SECRET_ACCESS_KEY_FIELD_NAME,
+                SESSION_TOKEN_FIELD_NAME);
+        if (notAllowed != null) {
+            throw new CompilationException(PARAM_NOT_ALLOWED_IF_PARAM_IS_PRESENT, notAllowed,
+                    INSTANCE_PROFILE_FIELD_NAME);
+        }
+        return InstanceProfileCredentialsProvider.create();
+    }
+
+    private static AwsCredentialsProvider validateAndGetAccessKeysAuthentication(Map<String, String> configuration)
+            throws CompilationException {
+        String accessKeyId = configuration.get(ACCESS_KEY_ID_FIELD_NAME);
+        String secretAccessKey = configuration.get(SECRET_ACCESS_KEY_FIELD_NAME);
+        String sessionToken = configuration.get(SESSION_TOKEN_FIELD_NAME);
+
+        // accessKeyId authentication
+        if (accessKeyId == null) {
+            throw new CompilationException(REQUIRED_PARAM_IF_PARAM_IS_PRESENT, ACCESS_KEY_ID_FIELD_NAME,
+                    SECRET_ACCESS_KEY_FIELD_NAME);
+        }
+        if (secretAccessKey == null) {
+            throw new CompilationException(REQUIRED_PARAM_IF_PARAM_IS_PRESENT, SECRET_ACCESS_KEY_FIELD_NAME,
+                    ACCESS_KEY_ID_FIELD_NAME);
+        }
+
+        String notAllowed = getNonNull(configuration, EXTERNAL_ID_FIELD_NAME);
+        if (notAllowed != null) {
+            throw new CompilationException(PARAM_NOT_ALLOWED_IF_PARAM_IS_PRESENT, notAllowed,
+                    ACCESS_KEY_ID_FIELD_NAME);
+        }
+
+        // use session token if provided
+        if (sessionToken != null) {
+            return StaticCredentialsProvider
+                    .create(AwsSessionCredentials.create(accessKeyId, secretAccessKey, sessionToken));
+        } else {
+            return StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKeyId, secretAccessKey));
+        }
+    }
+
+    private static boolean noAuth(Map<String, String> configuration) {
+        return getNonNull(configuration, INSTANCE_PROFILE_FIELD_NAME, ROLE_ARN_FIELD_NAME, EXTERNAL_ID_FIELD_NAME,
+                ACCESS_KEY_ID_FIELD_NAME, SECRET_ACCESS_KEY_FIELD_NAME, SESSION_TOKEN_FIELD_NAME) == null;
+    }
+
+    private static String getNonNull(Map<String, String> configuration, String... fieldNames) {
+        for (String fieldName : fieldNames) {
+            if (configuration.get(fieldName) != null) {
+                return fieldName;
+            }
+        }
+        return null;
+    }
 }
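
The TODO above about caching the assumed-role credentials has a ready-made counterpart in the
AWS SDK v2: StsAssumeRoleCredentialsProvider re-assumes the role shortly before each session
expires, so the role is not assumed per request. A sketch under that assumption, with the
region, role ARN, and external ID as placeholders:

    import java.util.UUID;

    import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
    import software.amazon.awssdk.regions.Region;
    import software.amazon.awssdk.services.s3.S3Client;
    import software.amazon.awssdk.services.sts.StsClient;
    import software.amazon.awssdk.services.sts.auth.StsAssumeRoleCredentialsProvider;
    import software.amazon.awssdk.services.sts.model.AssumeRoleRequest;

    public final class AssumeRoleSketch {
        public static void main(String[] args) {
            StsClient sts = StsClient.builder().region(Region.US_EAST_1).build();
            AssumeRoleRequest request = AssumeRoleRequest.builder()
                    .roleArn("arn:aws:iam::123456789012:role/example-role") // placeholder
                    .externalId("example-external-id")                      // placeholder
                    .roleSessionName(UUID.randomUUID().toString())
                    .durationSeconds(900)
                    .build();
            AwsCredentialsProvider provider = StsAssumeRoleCredentialsProvider.builder()
                    .stsClient(sts)
                    .refreshRequest(request) // re-assumed automatically before expiry
                    .build();
            try (S3Client s3 = S3Client.builder().region(Region.US_EAST_1)
                    .credentialsProvider(provider).build()) {
                s3.listBuckets().buckets().forEach(b -> System.out.println(b.name()));
            }
        }
    }
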
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/google/gcs/GCSConstants.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/google/gcs/GCSConstants.java
index f2dbde7..6314ce8 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/google/gcs/GCSConstants.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/google/gcs/GCSConstants.java
@@ -26,6 +26,7 @@
     public static final String APPLICATION_DEFAULT_CREDENTIALS_FIELD_NAME = "applicationDefaultCredentials";
     public static final String JSON_CREDENTIALS_FIELD_NAME = "jsonCredentials";
     public static final String ENDPOINT_FIELD_NAME = "endpoint";
+    public static final String STORAGE_PREFIX = "prefix";
 
     /*
      * Hadoop internal configuration
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/HDFSExternalFileWriter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/HDFSExternalFileWriter.java
new file mode 100644
index 0000000..18865d0
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/HDFSExternalFileWriter.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer;
+
+import java.io.IOException;
+
+import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.exceptions.RuntimeDataException;
+import org.apache.asterix.runtime.writer.IExternalFileWriter;
+import org.apache.asterix.runtime.writer.IExternalPrinter;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.SourceLocation;
+import org.apache.hyracks.data.std.api.IValueReference;
+
+public class HDFSExternalFileWriter implements IExternalFileWriter {
+
+    private final IExternalPrinter printer;
+    private final FileSystem fs;
+    private final boolean partitionedPath;
+    private final SourceLocation pathSourceLocation;
+    private FSDataOutputStream outputStream = null;
+
+    HDFSExternalFileWriter(IExternalPrinter printer, FileSystem fs, boolean partitionedPath,
+            SourceLocation pathSourceLocation) {
+        this.printer = printer;
+        this.fs = fs;
+        this.partitionedPath = partitionedPath;
+        this.pathSourceLocation = pathSourceLocation;
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        printer.open();
+    }
+
+    @Override
+    public void validate(String directory) throws HyracksDataException {
+        if (partitionedPath) {
+            Path dirPath = new Path(directory);
+            try {
+                if (fs.exists(dirPath)) {
+                    FileStatus fileStatus = fs.getFileStatus(dirPath);
+                    if (fileStatus.isFile()) {
+                        throw new RuntimeDataException(ErrorCode.DIRECTORY_IS_NOT_EMPTY, pathSourceLocation, directory);
+                    }
+                    if (fileStatus.isDirectory()) {
+                        FileStatus[] fileStatuses = fs.listStatus(dirPath);
+                        if (fileStatuses.length != 0) {
+                            throw new RuntimeDataException(ErrorCode.DIRECTORY_IS_NOT_EMPTY, pathSourceLocation,
+                                    directory);
+                        }
+                    }
+                }
+            } catch (IOException ex) {
+                throw HyracksDataException.create(ex);
+            }
+        }
+    }
+
+    @Override
+    public boolean newFile(String directory, String fileName) throws HyracksDataException {
+        Path path = new Path(directory, fileName);
+        try {
+            outputStream = fs.create(path, false);
+            printer.newStream(outputStream);
+        } catch (FileAlreadyExistsException e) {
+            return false;
+        } catch (IOException e) {
+            throw HyracksDataException.create(e);
+        }
+        return true;
+    }
+
+    @Override
+    public void write(IValueReference value) throws HyracksDataException {
+        printer.print(value);
+    }
+
+    @Override
+    public void abort() throws HyracksDataException {
+        if (outputStream != null) {
+            outputStream.abort();
+        }
+        printer.close();
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        printer.close();
+    }
+}
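
As the overrides above suggest, the writer's contract is open -> validate -> newFile -> write*
-> close, with abort() as best-effort cleanup on failure. A hypothetical driver, assuming the
writer and the value source are handed in by the factory and the runtime:

    import org.apache.asterix.runtime.writer.IExternalFileWriter;
    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.data.std.api.IValueReference;

    public final class WriterLifecycleSketch {
        public static void drain(IExternalFileWriter writer, Iterable<IValueReference> values)
                throws HyracksDataException {
            writer.open();
            try {
                writer.validate("/warehouse/output"); // rejects a non-empty directory
                if (!writer.newFile("/warehouse/output", "part-0001")) {
                    // false means the file already existed; a real caller would pick a new name
                    throw HyracksDataException.create(new IllegalStateException("file exists"));
                }
                for (IValueReference value : values) {
                    writer.write(value);
                }
                writer.close();
            } catch (HyracksDataException e) {
                writer.abort(); // closes the printer and aborts any open output stream
                throw e;
            }
        }
    }
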
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/HDFSExternalFileWriterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/HDFSExternalFileWriterFactory.java
new file mode 100644
index 0000000..761131f
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/HDFSExternalFileWriterFactory.java
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer;
+
+import static org.apache.asterix.common.exceptions.ErrorCode.EXTERNAL_SOURCE_ERROR;
+import static org.apache.hyracks.api.util.ExceptionUtils.getMessageOrToString;
+
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Map;
+import java.util.UUID;
+
+import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.HDFSUtils;
+import org.apache.asterix.runtime.writer.ExternalFileWriterConfiguration;
+import org.apache.asterix.runtime.writer.IExternalFileWriter;
+import org.apache.asterix.runtime.writer.IExternalFileWriterFactory;
+import org.apache.asterix.runtime.writer.IExternalFileWriterFactoryProvider;
+import org.apache.asterix.runtime.writer.IExternalPrinter;
+import org.apache.asterix.runtime.writer.IExternalPrinterFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.SourceLocation;
+import org.apache.hyracks.api.util.ExceptionUtils;
+
+public class HDFSExternalFileWriterFactory implements IExternalFileWriterFactory {
+    private static final long serialVersionUID = 1L;
+    private static final char SEPARATOR = '/';
+    public static final IExternalFileWriterFactoryProvider PROVIDER = new IExternalFileWriterFactoryProvider() {
+        @Override
+        public IExternalFileWriterFactory create(ExternalFileWriterConfiguration configuration) {
+            return new HDFSExternalFileWriterFactory(configuration);
+        }
+
+        @Override
+        public char getSeparator() {
+            return SEPARATOR;
+        }
+    };
+
+    private final Map<String, String> configuration;
+    private final String staticPath;
+    private final SourceLocation pathSourceLocation;
+    private transient Credentials credentials;
+    private byte[] serializedCredentials;
+    private transient FileSystem fs;
+
+    private HDFSExternalFileWriterFactory(ExternalFileWriterConfiguration externalConfig) {
+        configuration = externalConfig.getConfiguration();
+        staticPath = externalConfig.getStaticPath();
+        pathSourceLocation = externalConfig.getPathSourceLocation();
+    }
+
+    private FileSystem createFileSystem(Configuration conf) throws CompilationException, HyracksDataException {
+        try {
+            if (credentials != null) {
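+                // run as an ephemeral remote user carrying the delegation tokens captured in validate()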
+                UserGroupInformation ugi = UserGroupInformation.createRemoteUser(UUID.randomUUID().toString());
+                ugi.addCredentials(credentials);
+                return ugi.doAs((PrivilegedExceptionAction<FileSystem>) () -> FileSystem.get(conf));
+            }
+            return FileSystem.get(conf);
+        } catch (InterruptedException ex) {
+            Thread.currentThread().interrupt(); // restore the interrupt status, as the Kerberos path does
+            throw HyracksDataException.create(ex);
+        } catch (IOException ex) {
+            throw CompilationException.create(EXTERNAL_SOURCE_ERROR, ex, getMessageOrToString(ex));
+        }
+    }
+
+    private void buildFileSystem() throws HyracksDataException {
+        try {
+            synchronized (this) {
+                if (credentials == null && serializedCredentials != null) {
+                    credentials = new Credentials();
+                    HDFSUtils.deserialize(serializedCredentials, credentials);
+                }
+                if (fs == null) {
+                    Configuration conf = HDFSUtils.configureHDFSwrite(configuration);
+                    fs = createFileSystem(conf);
+                }
+            }
+        } catch (IOException | CompilationException e) {
+            throw HyracksDataException.create(e);
+        }
+    }
+
+    @Override
+    public IExternalFileWriter createWriter(IHyracksTaskContext context, IExternalPrinterFactory printerFactory)
+            throws HyracksDataException {
+        buildFileSystem();
+        IExternalPrinter printer = printerFactory.createPrinter();
+        return new HDFSExternalFileWriter(printer, fs, staticPath == null, pathSourceLocation);
+    }
+
+    @Override
+    public char getSeparator() {
+        return SEPARATOR;
+    }
+
+    @Override
+    public void validate() throws AlgebricksException {
+        Configuration conf = HDFSUtils.configureHDFSwrite(configuration);
+        credentials = HDFSUtils.configureHadoopAuthentication(configuration, conf);
+        try {
+            if (credentials != null) {
+                serializedCredentials = HDFSUtils.serialize(credentials);
+            }
+            try (FileSystem testFs = createFileSystem(conf)) {
+                doValidate(testFs);
+            }
+        } catch (IOException ex) {
+            throw CompilationException.create(ErrorCode.EXTERNAL_SINK_ERROR, ExceptionUtils.getMessageOrToString(ex));
+        }
+    }
+
+    private void doValidate(FileSystem testFs) throws IOException, AlgebricksException {
+        if (staticPath != null) {
+            Path dirPath = new Path(staticPath);
+            if (testFs.exists(dirPath)) {
+                FileStatus fileStatus = testFs.getFileStatus(dirPath);
+                if (fileStatus.isFile()) {
+                    throw new CompilationException(ErrorCode.DIRECTORY_IS_NOT_EMPTY, pathSourceLocation, staticPath);
+                }
+                if (fileStatus.isDirectory()) {
+                    FileStatus[] fileStatuses = testFs.listStatus(dirPath);
+                    if (fileStatuses.length != 0) {
+                        throw new CompilationException(ErrorCode.DIRECTORY_IS_NOT_EMPTY, pathSourceLocation,
+                                staticPath);
+                    }
+                }
+            }
+            checkDirectoryWritePermission(testFs);
+        }
+    }
+
+    private void checkDirectoryWritePermission(FileSystem fs) throws AlgebricksException {
+        if (!Boolean.parseBoolean(configuration.getOrDefault(ExternalDataConstants.KEY_VALIDATE_WRITE_PERMISSION,
+                Boolean.TRUE.toString()))) {
+            return;
+        }
+        Path path = new Path(staticPath, "testFile");
+        // try-with-resources so the probe stream is closed even if the write fails
+        try (FSDataOutputStream outputStream = fs.create(path)) {
+            fs.deleteOnExit(path);
+            outputStream.write(0);
+        } catch (IOException ex) {
+            throw CompilationException.create(ErrorCode.EXTERNAL_SINK_ERROR, ex, getMessageOrToString(ex));
+        }
+    }
+}
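
Because the factory is Java-serialized while Hadoop's Credentials is only a Writable, the patch
ships the delegation tokens as a byte array and rebuilds the transient fields on each node. The
round trip can be sketched independently of the factory; the alias and secret are placeholders:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.Credentials;

    public final class CredentialsRoundTrip {
        public static void main(String[] args) throws IOException {
            Credentials original = new Credentials();
            original.addSecretKey(new Text("example-alias"), "secret".getBytes(StandardCharsets.UTF_8));

            // serialize through the Writable interface, as HDFSUtils.serialize does
            byte[] bytes;
            try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
                    DataOutputStream dos = new DataOutputStream(bos)) {
                original.write(dos);
                dos.flush();
                bytes = bos.toByteArray();
            }

            // deserialize into a fresh instance, as HDFSUtils.deserialize does
            Credentials restored = new Credentials();
            try (DataInputStream din = new DataInputStream(new ByteArrayInputStream(bytes))) {
                restored.readFields(din);
            }
            System.out.println(new String(restored.getSecretKey(new Text("example-alias")),
                    StandardCharsets.UTF_8));
        }
    }
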
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/CsvExternalFilePrinter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/CsvExternalFilePrinter.java
new file mode 100644
index 0000000..b5299ad
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/CsvExternalFilePrinter.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.external.writer.printer;
+
+import org.apache.asterix.external.writer.compressor.IExternalFileCompressStreamFactory;
+import org.apache.hyracks.algebricks.data.IPrinter;
+
+public class CsvExternalFilePrinter extends AbstractTextualExternalPrinter {
+    CsvExternalFilePrinter(IPrinter printer, IExternalFileCompressStreamFactory compressStreamFactory) {
+        super(printer, compressStreamFactory);
+    }
+
+    @Override
+    void afterPrint() {
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/CsvExternalFilePrinterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/CsvExternalFilePrinterFactory.java
new file mode 100644
index 0000000..0ed1498
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/CsvExternalFilePrinterFactory.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.external.writer.printer;
+
+import org.apache.asterix.external.writer.compressor.IExternalFileCompressStreamFactory;
+import org.apache.asterix.runtime.writer.IExternalPrinter;
+import org.apache.asterix.runtime.writer.IExternalPrinterFactory;
+import org.apache.hyracks.algebricks.data.IPrinterFactory;
+
+public class CsvExternalFilePrinterFactory implements IExternalPrinterFactory {
+    private static final long serialVersionUID = 8971234908711234L;
+    protected final IPrinterFactory printerFactory;
+    private final IExternalFileCompressStreamFactory compressStreamFactory;
+
+    public CsvExternalFilePrinterFactory(IPrinterFactory printerFactory,
+            IExternalFileCompressStreamFactory compressStreamFactory) {
+        this.printerFactory = printerFactory;
+        this.compressStreamFactory = compressStreamFactory;
+    }
+
+    @Override
+    public IExternalPrinter createPrinter() {
+        return new CsvExternalFilePrinter(printerFactory.createPrinter(), compressStreamFactory);
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinter.java
new file mode 100644
index 0000000..ba7a1ee
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinter.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.external.writer.printer;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.AsterixParquetRuntimeException;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.writer.printer.parquet.AsterixParquetWriter;
+import org.apache.asterix.om.types.IAType;
+import org.apache.asterix.runtime.writer.IExternalPrinter;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.column.ParquetProperties;
+import org.apache.parquet.hadoop.ParquetWriter;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.MessageTypeParser;
+
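+/**
+ * Prints AsterixDB values as Parquet files. A fresh Parquet writer is created
+ * for every output stream, configured with the supplied schema, compression
+ * codec, row-group size, page size, and writer version.
+ */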
+public class ParquetExternalFilePrinter implements IExternalPrinter {
+    private final IAType typeInfo;
+    private final CompressionCodecName compressionCodecName;
+    private MessageType schema;
+    private ParquetOutputFile parquetOutputFile;
+    private String parquetSchemaString;
+    private ParquetWriter<IValueReference> writer;
+    private final long rowGroupSize;
+    private final int pageSize;
+    private final ParquetProperties.WriterVersion writerVersion;
+
+    public ParquetExternalFilePrinter(CompressionCodecName compressionCodecName, String parquetSchemaString,
+            IAType typeInfo, long rowGroupSize, int pageSize, ParquetProperties.WriterVersion writerVersion) {
+        this.compressionCodecName = compressionCodecName;
+        this.parquetSchemaString = parquetSchemaString;
+        this.typeInfo = typeInfo;
+        this.rowGroupSize = rowGroupSize;
+        this.pageSize = pageSize;
+        this.writerVersion = writerVersion;
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        schema = MessageTypeParser.parseMessageType(parquetSchemaString);
+    }
+
+    @Override
+    public void newStream(OutputStream outputStream) throws HyracksDataException {
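+        // Close the writer of the previous stream (if any) so that its Parquet
+        // footer is flushed before a new file is started.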
+        if (parquetOutputFile != null) {
+            close();
+        }
+        parquetOutputFile = new ParquetOutputFile(outputStream);
+        Configuration conf = new Configuration();
+
+        try {
+            writer = AsterixParquetWriter.builder(parquetOutputFile).withCompressionCodec(compressionCodecName)
+                    .withType(schema).withTypeInfo(typeInfo).withRowGroupSize(rowGroupSize).withPageSize(pageSize)
+                    .withDictionaryPageSize(ExternalDataConstants.PARQUET_DICTIONARY_PAGE_SIZE)
+                    .enableDictionaryEncoding().withValidation(false).withWriterVersion(writerVersion).withConf(conf)
+                    .build();
+        } catch (IOException e) {
+            throw HyracksDataException.create(e);
+        }
+    }
+
+    @Override
+    public void print(IValueReference value) throws HyracksDataException {
+        try {
+            this.writer.write(value);
+        } catch (AsterixParquetRuntimeException e) {
+            throw e.getHyracksDataException();
+        } catch (IOException e) {
+            throw HyracksDataException.create(e);
+        }
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        if (this.writer != null) {
+            try {
+                this.writer.close();
+            } catch (IOException e) {
+                throw HyracksDataException.create(e);
+            }
+        }
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinterFactory.java
new file mode 100644
index 0000000..b6ad34e
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinterFactory.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer;
+
+import org.apache.asterix.om.types.IAType;
+import org.apache.asterix.runtime.writer.IExternalPrinter;
+import org.apache.asterix.runtime.writer.IExternalPrinterFactory;
+import org.apache.parquet.column.ParquetProperties;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
+
+public class ParquetExternalFilePrinterFactory implements IExternalPrinterFactory {
+    private static final long serialVersionUID = 8971234908711235L;
+    private String parquetSchemaString;
+    private final IAType typeInfo;
+    private final CompressionCodecName compressionCodecName;
+    private final long rowGroupSize;
+    private final int pageSize;
+    private final ParquetProperties.WriterVersion writerVersion;
+
+    public ParquetExternalFilePrinterFactory(CompressionCodecName compressionCodecName, String parquetSchemaString,
+            IAType typeInfo, long rowGroupSize, int pageSize, ParquetProperties.WriterVersion writerVersion) {
+        this.compressionCodecName = compressionCodecName;
+        this.parquetSchemaString = parquetSchemaString;
+        this.typeInfo = typeInfo;
+        this.rowGroupSize = rowGroupSize;
+        this.pageSize = pageSize;
+        this.writerVersion = writerVersion;
+    }
+
+    public ParquetExternalFilePrinterFactory(CompressionCodecName compressionCodecName, IAType typeInfo,
+            long rowGroupSize, int pageSize, ParquetProperties.WriterVersion writerVersion) {
+        this.compressionCodecName = compressionCodecName;
+        this.typeInfo = typeInfo;
+        this.rowGroupSize = rowGroupSize;
+        this.pageSize = pageSize;
+        this.writerVersion = writerVersion;
+    }
+
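+    // The schema string may not be known when this factory is constructed
+    // (e.g., when the schema is inferred from the data); it can be set later.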
+    public void setParquetSchemaString(String parquetSchemaString) {
+        this.parquetSchemaString = parquetSchemaString;
+    }
+
+    @Override
+    public IExternalPrinter createPrinter() {
+        return new ParquetExternalFilePrinter(compressionCodecName, parquetSchemaString, typeInfo, rowGroupSize,
+                pageSize, writerVersion);
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinterFactoryProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinterFactoryProvider.java
new file mode 100644
index 0000000..bd70853
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinterFactoryProvider.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer;
+
+import java.io.Serializable;
+
+import org.apache.asterix.om.types.IAType;
+import org.apache.asterix.runtime.writer.IExternalPrinterFactory;
+import org.apache.parquet.column.ParquetProperties;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
+
+public class ParquetExternalFilePrinterFactoryProvider implements Serializable {
+    private static final long serialVersionUID = 8971234908711237L;
+    private final IAType typeInfo;
+    private final CompressionCodecName compressionCodecName;
+    private final long rowGroupSize;
+    private final int pageSize;
+    private final ParquetProperties.WriterVersion writerVersion;
+
+    public ParquetExternalFilePrinterFactoryProvider(CompressionCodecName compressionCodecName, IAType typeInfo,
+            long rowGroupSize, int pageSize, ParquetProperties.WriterVersion writerVersion) {
+        this.compressionCodecName = compressionCodecName;
+        this.typeInfo = typeInfo;
+        this.rowGroupSize = rowGroupSize;
+        this.pageSize = pageSize;
+        this.writerVersion = writerVersion;
+    }
+
+    public IExternalPrinterFactory createPrinterFactory() {
+        return new ParquetExternalFilePrinterFactory(compressionCodecName, typeInfo, rowGroupSize, pageSize,
+                writerVersion);
+    }
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetOutputFile.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetOutputFile.java
new file mode 100644
index 0000000..5db600f
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetOutputFile.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.external.writer.printer;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.parquet.hadoop.util.HadoopStreams;
+import org.apache.parquet.io.OutputFile;
+import org.apache.parquet.io.PositionOutputStream;
+
+/*
+ * Wraps an OutputStream as an OutputFile that the Parquet SDK can write to.
+ * The wrapped stream is assumed not to support the block-size hints used by
+ * distributed file systems, so a fixed default block size is reported instead.
+ */
+public class ParquetOutputFile implements OutputFile {
+    private final FSDataOutputStream fs;
+
+    /*
+     * Hadoop's FileSystem library uses this as its default block size.
+     * Ref: https://github.com/apache/hadoop/blob/74ff00705cf67911f1ff8320c6c97354350d6952/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java#L2756
+     */
+    private static final long DEFAULT_BLOCK_SIZE = 33554432L;
+
+    public ParquetOutputFile(OutputStream os) {
+        // The Statistics instance is required by FSDataOutputStream's constructor;
+        // the scheme name passed to it has no significance here.
+        this.fs = new FSDataOutputStream(os, new FileSystem.Statistics("test"));
+    }
+
+    @Override
+    public PositionOutputStream create(long blockSizeHint) throws IOException {
+        return HadoopStreams.wrap(fs);
+    }
+
+    @Override
+    public PositionOutputStream createOrOverwrite(long blockSizeHint) throws IOException {
+        return HadoopStreams.wrap(fs);
+    }
+
+    @Override
+    public boolean supportsBlockSize() {
+        return false;
+    }
+
+    @Override
+    public long defaultBlockSize() {
+        return DEFAULT_BLOCK_SIZE;
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/AsterixParquetTypeMap.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/AsterixParquetTypeMap.java
new file mode 100644
index 0000000..0dcdb3a
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/AsterixParquetTypeMap.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import java.util.Map;
+
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.parquet.schema.LogicalTypeAnnotation;
+import org.apache.parquet.schema.PrimitiveType;
+
+public class AsterixParquetTypeMap {
+
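+    // Parquet's narrowest physical integer type is INT32, so TINYINT and
+    // SMALLINT are widened to INT32. String and temporal semantics are
+    // preserved through the logical-type annotations below.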
+    public static final Map<ATypeTag, PrimitiveType.PrimitiveTypeName> PRIMITIVE_TYPE_NAME_MAP =
+            Map.ofEntries(Map.entry(ATypeTag.BOOLEAN, PrimitiveType.PrimitiveTypeName.BOOLEAN),
+                    Map.entry(ATypeTag.STRING, PrimitiveType.PrimitiveTypeName.BINARY),
+                    Map.entry(ATypeTag.TINYINT, PrimitiveType.PrimitiveTypeName.INT32),
+                    Map.entry(ATypeTag.SMALLINT, PrimitiveType.PrimitiveTypeName.INT32),
+                    Map.entry(ATypeTag.INTEGER, PrimitiveType.PrimitiveTypeName.INT32),
+                    Map.entry(ATypeTag.BIGINT, PrimitiveType.PrimitiveTypeName.INT64),
+                    Map.entry(ATypeTag.FLOAT, PrimitiveType.PrimitiveTypeName.FLOAT),
+                    Map.entry(ATypeTag.DOUBLE, PrimitiveType.PrimitiveTypeName.DOUBLE),
+                    Map.entry(ATypeTag.DATE, PrimitiveType.PrimitiveTypeName.INT32),
+                    Map.entry(ATypeTag.TIME, PrimitiveType.PrimitiveTypeName.INT32),
+                    Map.entry(ATypeTag.DATETIME, PrimitiveType.PrimitiveTypeName.INT64));
+
+    public static final Map<ATypeTag, LogicalTypeAnnotation> LOGICAL_TYPE_ANNOTATION_MAP =
+            Map.ofEntries(Map.entry(ATypeTag.STRING, LogicalTypeAnnotation.stringType()),
+                    Map.entry(ATypeTag.DATE, LogicalTypeAnnotation.dateType()),
+                    Map.entry(ATypeTag.TIME,
+                            LogicalTypeAnnotation.timeType(true, LogicalTypeAnnotation.TimeUnit.MILLIS)),
+                    Map.entry(ATypeTag.DATETIME,
+                            LogicalTypeAnnotation.timestampType(true, LogicalTypeAnnotation.TimeUnit.MILLIS)));
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/AsterixParquetWriter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/AsterixParquetWriter.java
new file mode 100644
index 0000000..edeab1f
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/AsterixParquetWriter.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.asterix.om.types.IAType;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.column.ParquetProperties;
+import org.apache.parquet.hadoop.ParquetWriter;
+import org.apache.parquet.hadoop.api.WriteSupport;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.io.OutputFile;
+import org.apache.parquet.schema.MessageType;
+
+public class AsterixParquetWriter extends ParquetWriter<IValueReference> {
+    public static Builder builder(Path file) {
+        return new Builder(file);
+    }
+
+    public static Builder builder(OutputFile file) {
+        return new Builder(file);
+    }
+
+    AsterixParquetWriter(Path file, WriteSupport<IValueReference> writeSupport,
+            CompressionCodecName compressionCodecName, int blockSize, int pageSize, boolean enableDictionary,
+            boolean enableValidation, ParquetProperties.WriterVersion writerVersion, Configuration conf)
+            throws IOException {
+        super(file, writeSupport, compressionCodecName, blockSize, pageSize, pageSize, enableDictionary,
+                enableValidation, writerVersion, conf);
+    }
+
+    public static class Builder extends ParquetWriter.Builder<IValueReference, Builder> {
+        private MessageType type;
+        private IAType typeInfo;
+        private Map<String, String> extraMetaData;
+
+        private Builder(Path file) {
+            super(file);
+            this.type = null;
+            this.extraMetaData = new HashMap<>();
+        }
+
+        private Builder(OutputFile file) {
+            super(file);
+            this.type = null;
+            this.extraMetaData = new HashMap<>();
+        }
+
+        public Builder withType(MessageType type) {
+            this.type = type;
+            return this;
+        }
+
+        public Builder withTypeInfo(IAType typeInfo) {
+            this.typeInfo = typeInfo;
+            return this;
+        }
+
+        public Builder withExtraMetaData(Map<String, String> extraMetaData) {
+            this.extraMetaData = extraMetaData;
+            return this;
+        }
+
+        @Override
+        protected Builder self() {
+            return this;
+        }
+
+        @Override
+        protected WriteSupport<IValueReference> getWriteSupport(Configuration conf) {
+            return new ObjectWriteSupport(this.type, this.typeInfo, this.extraMetaData);
+        }
+    }
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/FieldNamesDictionary.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/FieldNamesDictionary.java
new file mode 100644
index 0000000..7058bf6
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/FieldNamesDictionary.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.asterix.om.dictionary.FieldNamesTrieDictionary;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.hyracks.util.string.UTF8StringUtil;
+
+public class FieldNamesDictionary {
+    private final FieldNamesTrieDictionary trie;
+    private final List<String> fieldNames;
+    private final StringBuilder builder;
+
+    public FieldNamesDictionary() {
+        trie = new FieldNamesTrieDictionary();
+        fieldNames = new ArrayList<>();
+        builder = new StringBuilder();
+    }
+
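+    // The trie assigns sequential indexes to distinct field names, so each
+    // name is UTF-8 decoded at most once and afterwards served from the cache.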
+    public String getOrCreateFieldNameIndex(IValueReference pointable) throws HyracksDataException {
+        int index = trie.getOrCreateFieldNameIndex(pointable);
+        if (index < fieldNames.size()) {
+            return fieldNames.get(index);
+        }
+
+        builder.setLength(0);
+        String fieldName = UTF8StringUtil.toString(pointable.getByteArray(), pointable.getStartOffset(), builder);
+        fieldNames.add(fieldName);
+        return fieldName;
+    }
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ISchemaChecker.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ISchemaChecker.java
new file mode 100644
index 0000000..99b9736
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ISchemaChecker.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+
+public interface ISchemaChecker {
+    enum SchemaComparisonType {
+        EQUIVALENT,
+        GROWING,
+        CONFLICTING
+    }
+
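+    // Declaration order encodes severity (EQUIVALENT < GROWING < CONFLICTING),
+    // so the ordinal-wise max yields the more severe of two comparison results.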
+    static SchemaComparisonType max(ISchemaChecker.SchemaComparisonType a, ISchemaChecker.SchemaComparisonType b) {
+        return a.compareTo(b) > 0 ? a : b;
+    }
+
+    SchemaComparisonType checkSchema(ParquetSchemaTree.SchemaNode schemaNode, IValueReference iValueReference)
+            throws HyracksDataException;
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ObjectWriteSupport.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ObjectWriteSupport.java
new file mode 100644
index 0000000..512b523
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ObjectWriteSupport.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import java.util.Map;
+
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.AsterixParquetRuntimeException;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.hadoop.api.WriteSupport;
+import org.apache.parquet.io.api.RecordConsumer;
+import org.apache.parquet.schema.MessageType;
+
+public class ObjectWriteSupport extends WriteSupport<IValueReference> {
+    private final MessageType schema;
+    private final Map<String, String> extraMetaData;
+    private final ParquetRecordLazyVisitor parquetRecordLazyVisitor;
+    private RecordConsumer recordConsumer;
+
+    public ObjectWriteSupport(MessageType schema, IAType typeInfo, Map<String, String> extraMetaData) {
+        this.schema = schema;
+        this.extraMetaData = extraMetaData;
+        parquetRecordLazyVisitor = new ParquetRecordLazyVisitor(schema, typeInfo);
+    }
+
+    @Override
+    public String getName() {
+        return "asterix";
+    }
+
+    @Override
+    public WriteSupport.WriteContext init(Configuration configuration) {
+        return new WriteSupport.WriteContext(this.schema, this.extraMetaData);
+    }
+
+    @Override
+    public void prepareForWrite(RecordConsumer recordConsumer) {
+        this.recordConsumer = recordConsumer;
+    }
+
+    @Override
+    public void write(IValueReference valueReference) {
+        try {
+            parquetRecordLazyVisitor.consumeRecord(valueReference, recordConsumer);
+        } catch (HyracksDataException e) {
+            throw new AsterixParquetRuntimeException(e);
+        }
+    }
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetRecordLazyVisitor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetRecordLazyVisitor.java
new file mode 100644
index 0000000..373bfe4
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetRecordLazyVisitor.java
@@ -0,0 +1,180 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import static org.apache.asterix.external.writer.printer.parquet.ParquetValueWriter.ELEMENT_FIELD;
+import static org.apache.asterix.external.writer.printer.parquet.ParquetValueWriter.GROUP_TYPE_ERROR_FIELD;
+import static org.apache.asterix.external.writer.printer.parquet.ParquetValueWriter.LIST_FIELD;
+import static org.apache.asterix.external.writer.printer.parquet.ParquetValueWriter.PRIMITIVE_TYPE_ERROR_FIELD;
+
+import org.apache.asterix.om.lazy.AbstractLazyVisitablePointable;
+import org.apache.asterix.om.lazy.AbstractListLazyVisitablePointable;
+import org.apache.asterix.om.lazy.FlatLazyVisitablePointable;
+import org.apache.asterix.om.lazy.ILazyVisitablePointableVisitor;
+import org.apache.asterix.om.lazy.RecordLazyVisitablePointable;
+import org.apache.asterix.om.lazy.TypedRecordLazyVisitablePointable;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.api.exceptions.ErrorCode;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.io.api.RecordConsumer;
+import org.apache.parquet.schema.GroupType;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.Type;
+
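+/**
+ * Visits a lazy AsterixDB record and forwards its values to a Parquet
+ * {@link RecordConsumer}, verifying along the way that the record structure
+ * matches the target schema.
+ */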
+public class ParquetRecordLazyVisitor implements ILazyVisitablePointableVisitor<Void, Type> {
+
+    private final MessageType schema;
+    private final RecordLazyVisitablePointable rec;
+    private RecordConsumer recordConsumer;
+    private final FieldNamesDictionary fieldNamesDictionary;
+
+    private final ParquetValueWriter parquetValueWriter;
+
+    public ParquetRecordLazyVisitor(MessageType schema, IAType typeInfo) {
+        this.schema = schema;
+        if (typeInfo.getTypeTag() == ATypeTag.OBJECT) {
+            this.rec = new TypedRecordLazyVisitablePointable((ARecordType) typeInfo);
+        } else if (typeInfo.getTypeTag() == ATypeTag.ANY) {
+            this.rec = new RecordLazyVisitablePointable(true);
+        } else {
+            throw new RuntimeException("Unsupported type for Parquet printing");
+        }
+        this.fieldNamesDictionary = new FieldNamesDictionary();
+        this.parquetValueWriter = new ParquetValueWriter();
+    }
+
+    public MessageType getSchema() {
+        return schema;
+    }
+
+    @Override
+    public Void visit(RecordLazyVisitablePointable pointable, Type type) throws HyracksDataException {
+
+        if (type.isPrimitive()) {
+            throw new HyracksDataException(ErrorCode.RESULT_DOES_NOT_FOLLOW_SCHEMA, GROUP_TYPE_ERROR_FIELD,
+                    PRIMITIVE_TYPE_ERROR_FIELD, type.getName());
+        }
+        GroupType groupType = type.asGroupType();
+        recordConsumer.startGroup();
+
+        for (int i = 0; i < pointable.getNumberOfChildren(); i++) {
+            pointable.nextChild();
+            AbstractLazyVisitablePointable child = pointable.getChildVisitablePointable();
+            String columnName = fieldNamesDictionary.getOrCreateFieldNameIndex(pointable.getFieldName());
+
+            if (!groupType.containsField(columnName)) {
+                throw new HyracksDataException(ErrorCode.EXTRA_FIELD_IN_RESULT_NOT_FOUND_IN_SCHEMA, columnName,
+                        groupType.getName());
+            }
+            recordConsumer.startField(columnName, groupType.getFieldIndex(columnName));
+            child.accept(this, groupType.getType(columnName));
+            recordConsumer.endField(columnName, groupType.getFieldIndex(columnName));
+        }
+        recordConsumer.endGroup();
+        return null;
+    }
+
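+    // Lists are expected in Parquet's standard three-level encoding: an outer
+    // group containing a repeated "list" group whose single "element" field
+    // holds the item type.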
+    @Override
+    public Void visit(AbstractListLazyVisitablePointable pointable, Type type) throws HyracksDataException {
+
+        if (type.isPrimitive()) {
+            throw new HyracksDataException(ErrorCode.RESULT_DOES_NOT_FOLLOW_SCHEMA, GROUP_TYPE_ERROR_FIELD,
+                    PRIMITIVE_TYPE_ERROR_FIELD, type.getName());
+        }
+        GroupType groupType = type.asGroupType();
+
+        if (!groupType.containsField(LIST_FIELD)) {
+            throw new HyracksDataException(ErrorCode.EXTRA_FIELD_IN_RESULT_NOT_FOUND_IN_SCHEMA, LIST_FIELD,
+                    groupType.getName());
+        }
+
+        if (groupType.getType(LIST_FIELD).isPrimitive()) {
+            throw new HyracksDataException(ErrorCode.RESULT_DOES_NOT_FOLLOW_SCHEMA, GROUP_TYPE_ERROR_FIELD,
+                    PRIMITIVE_TYPE_ERROR_FIELD, LIST_FIELD);
+        }
+
+        GroupType listType = groupType.getType(LIST_FIELD).asGroupType();
+
+        if (!listType.containsField(ELEMENT_FIELD)) {
+            throw new HyracksDataException(ErrorCode.EXTRA_FIELD_IN_RESULT_NOT_FOUND_IN_SCHEMA, ELEMENT_FIELD,
+                    listType.getName());
+        }
+
+        recordConsumer.startGroup();
+
+        if (pointable.getNumberOfChildren() > 0) {
+            recordConsumer.startField(LIST_FIELD, groupType.getFieldIndex(LIST_FIELD));
+
+            for (int i = 0; i < pointable.getNumberOfChildren(); i++) {
+                pointable.nextChild();
+                AbstractLazyVisitablePointable child = pointable.getChildVisitablePointable();
+
+                recordConsumer.startGroup();
+                recordConsumer.startField(ELEMENT_FIELD, listType.getFieldIndex(ELEMENT_FIELD));
+                child.accept(this, listType.getType(ELEMENT_FIELD));
+                recordConsumer.endField(ELEMENT_FIELD, listType.getFieldIndex(ELEMENT_FIELD));
+                recordConsumer.endGroup();
+
+            }
+
+            recordConsumer.endField(LIST_FIELD, groupType.getFieldIndex(LIST_FIELD));
+        }
+
+        recordConsumer.endGroup();
+        return null;
+    }
+
+    @Override
+    public Void visit(FlatLazyVisitablePointable pointable, Type type) throws HyracksDataException {
+
+        if (!type.isPrimitive()) {
+            throw new HyracksDataException(ErrorCode.RESULT_DOES_NOT_FOLLOW_SCHEMA, PRIMITIVE_TYPE_ERROR_FIELD,
+                    GROUP_TYPE_ERROR_FIELD, type.getName());
+        }
+        parquetValueWriter.addValueToColumn(recordConsumer, pointable, type.asPrimitiveType());
+        return null;
+    }
+
+    public void consumeRecord(IValueReference valueReference, RecordConsumer recordConsumer)
+            throws HyracksDataException {
+        rec.set(valueReference);
+        this.recordConsumer = recordConsumer;
+
+        recordConsumer.startMessage();
+        for (int i = 0; i < rec.getNumberOfChildren(); i++) {
+            rec.nextChild();
+            String columnName = fieldNamesDictionary.getOrCreateFieldNameIndex(rec.getFieldName());
+            AbstractLazyVisitablePointable child = rec.getChildVisitablePointable();
+
+            if (!schema.containsField(columnName)) {
+                throw new HyracksDataException(ErrorCode.EXTRA_FIELD_IN_RESULT_NOT_FOUND_IN_SCHEMA, columnName,
+                        schema.getName());
+            }
+
+            recordConsumer.startField(columnName, schema.getFieldIndex(columnName));
+            child.accept(this, schema.getType(columnName));
+            recordConsumer.endField(columnName, schema.getFieldIndex(columnName));
+        }
+        recordConsumer.endMessage();
+    }
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetSchemaBuilderUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetSchemaBuilderUtils.java
new file mode 100644
index 0000000..da0cef0
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetSchemaBuilderUtils.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import org.apache.parquet.schema.LogicalTypeAnnotation;
+import org.apache.parquet.schema.PrimitiveType;
+import org.apache.parquet.schema.Types;
+
+public class ParquetSchemaBuilderUtils {
+
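+    // Each helper dispatches on the concrete builder type so that nested
+    // groups, lists, and primitives can be chained onto either a group parent
+    // or a list parent; any other parent type yields null.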
+    public static Types.BaseGroupBuilder<?, ?> getGroupChild(Types.Builder parent) {
+        if (parent instanceof Types.BaseGroupBuilder) {
+            return ((Types.BaseGroupBuilder<?, ?>) parent).optionalGroup();
+        } else if (parent instanceof Types.BaseListBuilder) {
+            return ((Types.BaseListBuilder<?, ?>) parent).optionalGroupElement();
+        } else {
+            return null;
+        }
+    }
+
+    public static Types.BaseListBuilder<?, ?> getListChild(Types.Builder parent) {
+        if (parent instanceof Types.BaseGroupBuilder) {
+            return ((Types.BaseGroupBuilder<?, ?>) parent).optionalList();
+        } else if (parent instanceof Types.BaseListBuilder) {
+            return ((Types.BaseListBuilder<?, ?>) parent).optionalListElement();
+        } else {
+            return null;
+        }
+    }
+
+    public static Types.Builder<?, ?> getPrimitiveChild(Types.Builder parent, PrimitiveType.PrimitiveTypeName type,
+            LogicalTypeAnnotation annotation) {
+        if (parent instanceof Types.BaseGroupBuilder) {
+            return ((Types.BaseGroupBuilder<?, ?>) parent).optional(type).as(annotation);
+        } else if (parent instanceof Types.BaseListBuilder) {
+            return ((Types.BaseListBuilder<?, ?>) parent).optionalElement(type).as(annotation);
+        } else {
+            return null;
+        }
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetSchemaLazyVisitor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetSchemaLazyVisitor.java
new file mode 100644
index 0000000..055c635
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetSchemaLazyVisitor.java
@@ -0,0 +1,143 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import static org.apache.asterix.external.writer.printer.parquet.ParquetSchemaTree.buildParquetSchema;
+
+import java.util.Map;
+
+import org.apache.asterix.om.lazy.AbstractLazyVisitablePointable;
+import org.apache.asterix.om.lazy.AbstractListLazyVisitablePointable;
+import org.apache.asterix.om.lazy.FlatLazyVisitablePointable;
+import org.apache.asterix.om.lazy.ILazyVisitablePointableVisitor;
+import org.apache.asterix.om.lazy.RecordLazyVisitablePointable;
+import org.apache.asterix.om.lazy.TypedRecordLazyVisitablePointable;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.api.exceptions.ErrorCode;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.Types;
+
+public class ParquetSchemaLazyVisitor implements ILazyVisitablePointableVisitor<Void, ParquetSchemaTree.SchemaNode> {
+    private final RecordLazyVisitablePointable rec;
+    private final FieldNamesDictionary fieldNamesDictionary;
+    private static final String SCHEMA_NAME = "asterix_schema";
+
+    public ParquetSchemaLazyVisitor(IAType typeInfo) {
+        this.fieldNamesDictionary = new FieldNamesDictionary();
+        if (typeInfo.getTypeTag() == ATypeTag.OBJECT) {
+            this.rec = new TypedRecordLazyVisitablePointable((ARecordType) typeInfo);
+        } else if (typeInfo.getTypeTag() == ATypeTag.ANY) {
+            this.rec = new RecordLazyVisitablePointable(true);
+        } else {
+            throw new RuntimeException("Unsupported type for Parquet printing");
+        }
+    }
+
+    @Override
+    public Void visit(RecordLazyVisitablePointable pointable, ParquetSchemaTree.SchemaNode schemaNode)
+            throws HyracksDataException {
+        if (schemaNode.getType() == null) {
+            schemaNode.setType(new ParquetSchemaTree.RecordType());
+        }
+        if (!(schemaNode.getType() instanceof ParquetSchemaTree.RecordType)) {
+            throw new HyracksDataException(ErrorCode.RESULT_DOES_NOT_FOLLOW_SCHEMA);
+        }
+        ParquetSchemaTree.RecordType recordType = (ParquetSchemaTree.RecordType) schemaNode.getType();
+        for (int i = 0; i < pointable.getNumberOfChildren(); i++) {
+            pointable.nextChild();
+            AbstractLazyVisitablePointable child = pointable.getChildVisitablePointable();
+            String childColumnName = fieldNamesDictionary.getOrCreateFieldNameIndex(pointable.getFieldName());
+            ParquetSchemaTree.SchemaNode childType;
+            if (recordType.getChildren().containsKey(childColumnName)) {
+                childType = recordType.getChildren().get(childColumnName);
+            } else {
+                childType = new ParquetSchemaTree.SchemaNode();
+                recordType.add(childColumnName, childType);
+            }
+            child.accept(this, childType);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(AbstractListLazyVisitablePointable pointable, ParquetSchemaTree.SchemaNode schemaNode)
+            throws HyracksDataException {
+        if (schemaNode.getType() == null) {
+            schemaNode.setType(new ParquetSchemaTree.ListType());
+        }
+        if (!(schemaNode.getType() instanceof ParquetSchemaTree.ListType)) {
+            throw new HyracksDataException(ErrorCode.RESULT_DOES_NOT_FOLLOW_SCHEMA);
+        }
+        ParquetSchemaTree.ListType listType = (ParquetSchemaTree.ListType) schemaNode.getType();
+        for (int i = 0; i < pointable.getNumberOfChildren(); i++) {
+            pointable.nextChild();
+            AbstractLazyVisitablePointable child = pointable.getChildVisitablePointable();
+            if (listType.isEmpty()) {
+                listType.setChild(new ParquetSchemaTree.SchemaNode());
+            }
+            child.accept(this, listType.getChild());
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(FlatLazyVisitablePointable pointable, ParquetSchemaTree.SchemaNode schemaNode)
+            throws HyracksDataException {
+        if (schemaNode.getType() == null) {
+            schemaNode.setType(new ParquetSchemaTree.FlatType(
+                    AsterixParquetTypeMap.PRIMITIVE_TYPE_NAME_MAP.get(pointable.getTypeTag()),
+                    AsterixParquetTypeMap.LOGICAL_TYPE_ANNOTATION_MAP.get(pointable.getTypeTag())));
+            return null;
+        }
+        if (!(schemaNode.getType() instanceof ParquetSchemaTree.FlatType)) {
+            throw new HyracksDataException(ErrorCode.RESULT_DOES_NOT_FOLLOW_SCHEMA);
+        }
+        ParquetSchemaTree.FlatType flatType = (ParquetSchemaTree.FlatType) schemaNode.getType();
+        if (!(flatType.getPrimitiveTypeName() == AsterixParquetTypeMap.PRIMITIVE_TYPE_NAME_MAP
+                .get(pointable.getTypeTag()))
+                || !(flatType.getLogicalTypeAnnotation() == AsterixParquetTypeMap.LOGICAL_TYPE_ANNOTATION_MAP
+                        .get(pointable.getTypeTag()))) {
+            throw new HyracksDataException(ErrorCode.RESULT_DOES_NOT_FOLLOW_SCHEMA);
+        }
+        return null;
+    }
+
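+    // Infers the schema tree of a single record by visiting all of its values.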
+    public ParquetSchemaTree.SchemaNode inferSchema(IValueReference valueReference) throws HyracksDataException {
+        ParquetSchemaTree.SchemaNode schemaNode = new ParquetSchemaTree.SchemaNode();
+        rec.set(valueReference);
+        rec.accept(this, schemaNode);
+        return schemaNode;
+    }
+
+    public static MessageType generateSchema(ParquetSchemaTree.SchemaNode schemaRoot) throws HyracksDataException {
+        Types.MessageTypeBuilder builder = Types.buildMessage();
+        if (schemaRoot.getType() == null) {
+            return builder.named(SCHEMA_NAME);
+        }
+        for (Map.Entry<String, ParquetSchemaTree.SchemaNode> entry : ((ParquetSchemaTree.RecordType) schemaRoot
+                .getType()).getChildren().entrySet()) {
+            buildParquetSchema(builder, entry.getValue(), entry.getKey());
+        }
+        return builder.named(SCHEMA_NAME);
+    }
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetSchemaTree.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetSchemaTree.java
new file mode 100644
index 0000000..ff512df
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetSchemaTree.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import static org.apache.asterix.external.writer.printer.parquet.ParquetSchemaBuilderUtils.getGroupChild;
+import static org.apache.asterix.external.writer.printer.parquet.ParquetSchemaBuilderUtils.getListChild;
+import static org.apache.asterix.external.writer.printer.parquet.ParquetSchemaBuilderUtils.getPrimitiveChild;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hyracks.api.exceptions.ErrorCode;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.parquet.schema.LogicalTypeAnnotation;
+import org.apache.parquet.schema.PrimitiveType;
+import org.apache.parquet.schema.Types;
+
+public class ParquetSchemaTree {
+
+    public static class SchemaNode {
+        private AbstractType type;
+
+        public SchemaNode() {
+            type = null;
+        }
+
+        public void setType(AbstractType type) {
+            this.type = type;
+        }
+
+        public AbstractType getType() {
+            return type;
+        }
+    }
+
+    static class RecordType extends AbstractType {
+        private final Map<String, SchemaNode> children;
+
+        public RecordType() {
+            children = new HashMap<>();
+        }
+
+        void add(String fieldName, SchemaNode childType) {
+            children.put(fieldName, childType);
+        }
+
+        SchemaNode get(String fieldName) {
+            return children.get(fieldName);
+        }
+
+        Map<String, SchemaNode> getChildren() {
+            return children;
+        }
+    }
+
+    abstract static class AbstractType {
+    }
+
+    static class FlatType extends AbstractType {
+        private final PrimitiveType.PrimitiveTypeName primitiveTypeName;
+        private final LogicalTypeAnnotation logicalTypeAnnotation;
+
+        public FlatType(PrimitiveType.PrimitiveTypeName primitiveTypeName,
+                LogicalTypeAnnotation logicalTypeAnnotation) {
+            this.primitiveTypeName = primitiveTypeName;
+            this.logicalTypeAnnotation = logicalTypeAnnotation;
+        }
+
+        public LogicalTypeAnnotation getLogicalTypeAnnotation() {
+            return logicalTypeAnnotation;
+        }
+
+        public PrimitiveType.PrimitiveTypeName getPrimitiveTypeName() {
+            return primitiveTypeName;
+        }
+    }
+
+    static class ListType extends AbstractType {
+        private SchemaNode child;
+
+        public ListType() {
+            child = null;
+        }
+
+        void setChild(SchemaNode child) {
+            this.child = child;
+        }
+
+        boolean isEmpty() {
+            return child == null;
+        }
+
+        public SchemaNode getChild() {
+            return child;
+        }
+    }
+
+    public static void buildParquetSchema(Types.Builder builder, SchemaNode schemaNode, String columnName)
+            throws HyracksDataException {
+        if (schemaNode.getType() == null) {
+            throw new HyracksDataException(ErrorCode.EMPTY_TYPE_INFERRED);
+        }
+        AbstractType typeClass = schemaNode.getType();
+        if (typeClass instanceof RecordType) {
+            buildRecord(builder, (RecordType) schemaNode.getType(), columnName);
+        } else if (typeClass instanceof ListType) {
+            buildList(builder, (ListType) schemaNode.getType(), columnName);
+        } else if (typeClass instanceof FlatType) {
+            buildFlat(builder, (FlatType) schemaNode.getType(), columnName);
+        }
+    }
+
+    private static void buildRecord(Types.Builder builder, RecordType type, String columnName)
+            throws HyracksDataException {
+        Types.BaseGroupBuilder<?, ?> childBuilder = getGroupChild(builder);
+        for (Map.Entry<String, SchemaNode> entry : type.getChildren().entrySet()) {
+            buildParquetSchema(childBuilder, entry.getValue(), entry.getKey());
+        }
+        childBuilder.named(columnName);
+    }
+
+    private static void buildList(Types.Builder builder, ListType type, String columnName) throws HyracksDataException {
+        Types.BaseListBuilder<?, ?> childBuilder = getListChild(builder);
+        SchemaNode child = type.getChild();
+        if (child == null) {
+            throw new HyracksDataException(ErrorCode.EMPTY_TYPE_INFERRED);
+        }
+        buildParquetSchema(childBuilder, child, columnName);
+    }
+
+    private static void buildFlat(Types.Builder builder, FlatType type, String columnName) throws HyracksDataException {
+        if (type.getPrimitiveTypeName() == null) {
+            // The observed values map to no Parquet primitive type (e.g., only
+            // null/missing values were seen), so report the type as empty.
+            throw new HyracksDataException(ErrorCode.EMPTY_TYPE_INFERRED);
+        }
+        getPrimitiveChild(builder, type.getPrimitiveTypeName(), type.getLogicalTypeAnnotation()).named(columnName);
+    }
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetValueWriter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetValueWriter.java
new file mode 100644
index 0000000..0390315
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetValueWriter.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import java.io.IOException;
+
+import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.exceptions.RuntimeDataException;
+import org.apache.asterix.dataflow.data.nontagged.printers.PrintTools;
+import org.apache.asterix.dataflow.data.nontagged.serde.ABooleanSerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import org.apache.asterix.om.lazy.FlatLazyVisitablePointable;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.utils.ResettableByteArrayOutputStream;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.primitive.VoidPointable;
+import org.apache.hyracks.util.string.UTF8StringUtil;
+import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.io.api.RecordConsumer;
+import org.apache.parquet.schema.PrimitiveType;
+
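+/**
+ * Writes flat (primitive) AsterixDB values into a Parquet {@link RecordConsumer}, converting each
+ * tagged value to the physical type that the file schema declares for its column. Numeric and
+ * temporal values are widened or narrowed as needed; NULL and MISSING produce no output.
+ */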
+public class ParquetValueWriter {
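+    // Field names used by Parquet's standard three-level LIST structure ("list" group, "element" field).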
+    public static final String LIST_FIELD = "list";
+    public static final String ELEMENT_FIELD = "element";
+
+    public static final String GROUP_TYPE_ERROR_FIELD = "group";
+    public static final String PRIMITIVE_TYPE_ERROR_FIELD = "primitive";
+
+    private final VoidPointable voidPointable;
+    private final ResettableByteArrayOutputStream byteArrayOutputStream;
+
+    ParquetValueWriter() {
+        this.voidPointable = VoidPointable.FACTORY.createPointable();
+        this.byteArrayOutputStream = new ResettableByteArrayOutputStream();
+    }
+
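+    // Emits an integral Asterix value as whichever numeric Parquet physical type the schema
+    // declared for this column, casting where needed; any non-numeric target is a type mismatch.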
+    private void addIntegerType(long value, PrimitiveType.PrimitiveTypeName primitiveTypeName, ATypeTag typeTag,
+            RecordConsumer recordConsumer) throws HyracksDataException {
+        switch (primitiveTypeName) {
+            case INT32:
+                recordConsumer.addInteger((int) value);
+                break;
+            case INT64:
+                recordConsumer.addLong(value);
+                break;
+            case FLOAT:
+                recordConsumer.addFloat(value);
+                break;
+            case DOUBLE:
+                recordConsumer.addDouble(value);
+                break;
+            default:
+                throw RuntimeDataException.create(ErrorCode.TYPE_MISMATCH_GENERIC, primitiveTypeName, typeTag);
+        }
+    }
+
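+    /**
+     * Writes a single flat value to the record consumer, skipping the leading type tag byte when
+     * the pointable is tagged. NULL and MISSING write nothing, leaving the field absent.
+     */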
+    public void addValueToColumn(RecordConsumer recordConsumer, FlatLazyVisitablePointable pointable,
+            PrimitiveType type) throws HyracksDataException {
+
+        ATypeTag typeTag = pointable.getTypeTag();
+        byte[] b = pointable.getByteArray();
+        int s, l;
+
+        if (pointable.isTagged()) {
+            s = pointable.getStartOffset() + 1;
+            l = pointable.getLength() - 1;
+        } else {
+            s = pointable.getStartOffset();
+            l = pointable.getLength();
+        }
+        voidPointable.set(b, s, l);
+
+        PrimitiveType.PrimitiveTypeName primitiveTypeName = type.getPrimitiveTypeName();
+
+        switch (typeTag) {
+            case TINYINT:
+                byte tinyIntValue = AInt8SerializerDeserializer.getByte(b, s);
+                addIntegerType(tinyIntValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case SMALLINT:
+                short smallIntValue = AInt16SerializerDeserializer.getShort(b, s);
+                addIntegerType(smallIntValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case INTEGER:
+                int intValue = AInt32SerializerDeserializer.getInt(b, s);
+                addIntegerType(intValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case BIGINT:
+                long bigIntValue = AInt64SerializerDeserializer.getLong(b, s);
+                addIntegerType(bigIntValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case FLOAT:
+                float floatValue = AFloatSerializerDeserializer.getFloat(b, s);
+                switch (primitiveTypeName) {
+                    case INT32:
+                        recordConsumer.addInteger((int) floatValue);
+                        break;
+                    case INT64:
+                        recordConsumer.addLong((long) floatValue);
+                        break;
+                    case FLOAT:
+                        recordConsumer.addFloat(floatValue);
+                        break;
+                    case DOUBLE:
+                        recordConsumer.addDouble(floatValue);
+                        break;
+                    default:
+                        throw RuntimeDataException.create(ErrorCode.TYPE_MISMATCH_GENERIC, primitiveTypeName, typeTag);
+                }
+                break;
+            case DOUBLE:
+                double doubleValue = ADoubleSerializerDeserializer.getDouble(b, s);
+                switch (primitiveTypeName) {
+                    case INT32:
+                        recordConsumer.addInteger((int) doubleValue);
+                        break;
+                    case INT64:
+                        recordConsumer.addLong((long) doubleValue);
+                        break;
+                    case FLOAT:
+                        recordConsumer.addFloat((float) doubleValue);
+                        break;
+                    case DOUBLE:
+                        recordConsumer.addDouble(doubleValue);
+                        break;
+                    default:
+                        throw RuntimeDataException.create(ErrorCode.TYPE_MISMATCH_GENERIC, primitiveTypeName, typeTag);
+                }
+                break;
+            case STRING:
+                int utfLength = UTF8StringUtil.getUTFLength(b, s);
+                if (primitiveTypeName == PrimitiveType.PrimitiveTypeName.BINARY) {
+                    byteArrayOutputStream.reset();
+                    try {
+                        PrintTools.writeUTF8StringAsJSONUnquoted(b, s, l, utfLength, byteArrayOutputStream);
+                    } catch (IOException e) {
+                        throw HyracksDataException.create(e);
+                    }
+                    recordConsumer.addBinary(Binary.fromReusedByteArray(byteArrayOutputStream.getByteArray(), 0,
+                            byteArrayOutputStream.getLength()));
+
+                } else {
+                    throw RuntimeDataException.create(ErrorCode.TYPE_MISMATCH_GENERIC, primitiveTypeName, typeTag);
+                }
+                break;
+            case BOOLEAN:
+                boolean booleanValue = ABooleanSerializerDeserializer.getBoolean(b, s);
+                if (primitiveTypeName == PrimitiveType.PrimitiveTypeName.BOOLEAN) {
+                    recordConsumer.addBoolean(booleanValue);
+                } else {
+                    throw RuntimeDataException.create(ErrorCode.TYPE_MISMATCH_GENERIC, primitiveTypeName, typeTag);
+                }
+                break;
+            case DATE:
+                int dateValue = ADateSerializerDeserializer.getChronon(b, s);
+                addIntegerType(dateValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case TIME:
+                int timeValue = ATimeSerializerDeserializer.getChronon(b, s);
+                addIntegerType(timeValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case DATETIME:
+                long dateTimeValue = ADateTimeSerializerDeserializer.getChronon(b, s);
+                addIntegerType(dateTimeValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case NULL:
+            case MISSING:
+                break;
+            default:
+                throw RuntimeDataException.create(ErrorCode.TYPE_MISMATCH_GENERIC, primitiveTypeName, typeTag);
+        }
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/SchemaCheckerLazyVisitor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/SchemaCheckerLazyVisitor.java
new file mode 100644
index 0000000..44cd5b2
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/SchemaCheckerLazyVisitor.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import org.apache.asterix.om.lazy.AbstractLazyVisitablePointable;
+import org.apache.asterix.om.lazy.AbstractListLazyVisitablePointable;
+import org.apache.asterix.om.lazy.FlatLazyVisitablePointable;
+import org.apache.asterix.om.lazy.ILazyVisitablePointableVisitor;
+import org.apache.asterix.om.lazy.RecordLazyVisitablePointable;
+import org.apache.asterix.om.lazy.TypedRecordLazyVisitablePointable;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+
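+/**
+ * Checks an incoming record against the Parquet schema inferred so far and reports whether it is
+ * EQUIVALENT, GROWING (it introduces fields or list elements the schema has not seen yet), or
+ * CONFLICTING (one of its values disagrees with the inferred type).
+ */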
+public class SchemaCheckerLazyVisitor implements ISchemaChecker,
+        ILazyVisitablePointableVisitor<ISchemaChecker.SchemaComparisonType, ParquetSchemaTree.SchemaNode> {
+    private final FieldNamesDictionary fieldNamesDictionary;
+    private final RecordLazyVisitablePointable record;
+
+    public SchemaCheckerLazyVisitor(IAType typeInfo) {
+        this.fieldNamesDictionary = new FieldNamesDictionary();
+        if (typeInfo.getTypeTag() == ATypeTag.OBJECT) {
+            this.record = new TypedRecordLazyVisitablePointable((ARecordType) typeInfo);
+        } else if (typeInfo.getTypeTag() == ATypeTag.ANY) {
+            this.record = new RecordLazyVisitablePointable(true);
+        } else {
+            throw new RuntimeException("Type Unsupported for parquet printing");
+        }
+    }
+
+    @Override
+    public ISchemaChecker.SchemaComparisonType visit(RecordLazyVisitablePointable pointable,
+            ParquetSchemaTree.SchemaNode schemaNode) throws HyracksDataException {
+        if (schemaNode.getType() == null) {
+            return ISchemaChecker.SchemaComparisonType.GROWING;
+        }
+
+        if (!(schemaNode.getType() instanceof ParquetSchemaTree.RecordType)) {
+            return ISchemaChecker.SchemaComparisonType.CONFLICTING;
+        }
+
+        ParquetSchemaTree.RecordType recordType = (ParquetSchemaTree.RecordType) schemaNode.getType();
+        ISchemaChecker.SchemaComparisonType schemaComparisonType = ISchemaChecker.SchemaComparisonType.EQUIVALENT;
+
+        for (int i = 0; i < pointable.getNumberOfChildren(); i++) {
+            pointable.nextChild();
+            AbstractLazyVisitablePointable child = pointable.getChildVisitablePointable();
+            String childColumnName = fieldNamesDictionary.getOrCreateFieldNameIndex(pointable.getFieldName());
+            ParquetSchemaTree.SchemaNode childType = recordType.getChildren().get(childColumnName);
+            if (childType == null) {
+                schemaComparisonType =
+                        ISchemaChecker.max(schemaComparisonType, ISchemaChecker.SchemaComparisonType.GROWING);
+                continue;
+            }
+            schemaComparisonType = ISchemaChecker.max(schemaComparisonType, child.accept(this, childType));
+        }
+        return schemaComparisonType;
+    }
+
+    @Override
+    public ISchemaChecker.SchemaComparisonType visit(AbstractListLazyVisitablePointable pointable,
+            ParquetSchemaTree.SchemaNode schemaNode) throws HyracksDataException {
+        if (schemaNode.getType() == null) {
+            return ISchemaChecker.SchemaComparisonType.GROWING;
+        }
+        if (!(schemaNode.getType() instanceof ParquetSchemaTree.ListType)) {
+            return ISchemaChecker.SchemaComparisonType.CONFLICTING;
+        }
+
+        ParquetSchemaTree.ListType listType = (ParquetSchemaTree.ListType) schemaNode.getType();
+        ISchemaChecker.SchemaComparisonType schemaComparisonType = ISchemaChecker.SchemaComparisonType.EQUIVALENT;
+
+        for (int i = 0; i < pointable.getNumberOfChildren(); i++) {
+            pointable.nextChild();
+            AbstractLazyVisitablePointable child = pointable.getChildVisitablePointable();
+            if (listType.isEmpty()) {
+                schemaComparisonType =
+                        ISchemaChecker.max(schemaComparisonType, ISchemaChecker.SchemaComparisonType.GROWING);
+                continue;
+            }
+            schemaComparisonType = ISchemaChecker.max(schemaComparisonType, child.accept(this, listType.getChild()));
+        }
+        return schemaComparisonType;
+    }
+
+    @Override
+    public ISchemaChecker.SchemaComparisonType visit(FlatLazyVisitablePointable pointable,
+            ParquetSchemaTree.SchemaNode schemaNode) throws HyracksDataException {
+        if (schemaNode.getType() == null) {
+            return ISchemaChecker.SchemaComparisonType.GROWING;
+        }
+        if (!(schemaNode.getType() instanceof ParquetSchemaTree.FlatType)) {
+            return ISchemaChecker.SchemaComparisonType.CONFLICTING;
+        }
+
+        ParquetSchemaTree.FlatType flatType = (ParquetSchemaTree.FlatType) schemaNode.getType();
+
+        if (flatType.getPrimitiveTypeName() != AsterixParquetTypeMap.PRIMITIVE_TYPE_NAME_MAP.get(pointable.getTypeTag())
+                || flatType.getLogicalTypeAnnotation() != AsterixParquetTypeMap.LOGICAL_TYPE_ANNOTATION_MAP
+                        .get(pointable.getTypeTag())) {
+            return ISchemaChecker.SchemaComparisonType.CONFLICTING;
+        }
+
+        return ISchemaChecker.SchemaComparisonType.EQUIVALENT;
+    }
+
+    @Override
+    public SchemaComparisonType checkSchema(ParquetSchemaTree.SchemaNode schemaNode, IValueReference valueReference)
+            throws HyracksDataException {
+        record.set(valueReference);
+        return record.accept(this, schemaNode);
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/SchemaConverterVisitor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/SchemaConverterVisitor.java
new file mode 100644
index 0000000..a6ea115
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/SchemaConverterVisitor.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import static org.apache.asterix.external.writer.printer.parquet.ParquetSchemaBuilderUtils.getGroupChild;
+import static org.apache.asterix.external.writer.printer.parquet.ParquetSchemaBuilderUtils.getListChild;
+import static org.apache.asterix.external.writer.printer.parquet.ParquetSchemaBuilderUtils.getPrimitiveChild;
+
+import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.AUnionType;
+import org.apache.asterix.om.types.AbstractCollectionType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.asterix.om.types.IATypeVisitor;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.parquet.schema.LogicalTypeAnnotation;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.PrimitiveType;
+import org.apache.parquet.schema.Types;
+
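+/**
+ * Converts a fully-specified AsterixDB record type into the equivalent Parquet message schema.
+ * Field types with no Parquet mapping are recorded and reported as unsupported once the schema
+ * is built.
+ */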
+public class SchemaConverterVisitor implements IATypeVisitor<Void, Pair<Types.Builder, String>> {
+    public static final String MESSAGE_NAME = "asterix_schema";
+    private final ARecordType schemaType;
+    private ATypeTag unsupportedType;
+
+    private SchemaConverterVisitor(ARecordType schemaType) {
+        this.schemaType = schemaType;
+        this.unsupportedType = null;
+    }
+
+    public static String convertToParquetSchemaString(ARecordType schemaType) throws CompilationException {
+        SchemaConverterVisitor schemaConverterVisitor = new SchemaConverterVisitor(schemaType);
+        return schemaConverterVisitor.getParquetSchema().toString();
+    }
+
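+    // Builds the Parquet message schema field by field, failing on the first field whose type
+    // cannot be mapped to a Parquet type.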
+    private MessageType getParquetSchema() throws CompilationException {
+        Types.MessageTypeBuilder builder = Types.buildMessage();
+
+        for (int i = 0; i < schemaType.getFieldNames().length; i++) {
+            String fieldName = schemaType.getFieldNames()[i];
+            IAType childType = schemaType.getFieldType(fieldName);
+            childType.accept(this, new Pair<>(builder, fieldName));
+            if (unsupportedType != null) {
+                throw new CompilationException(ErrorCode.TYPE_UNSUPPORTED_PARQUET_WRITE, unsupportedType.toString());
+            }
+        }
+        return builder.named(MESSAGE_NAME);
+    }
+
+    @Override
+    public Void visit(ARecordType recordType, Pair<Types.Builder, String> arg) {
+        Types.Builder builder = arg.first;
+        String fieldName = arg.second;
+
+        Types.BaseGroupBuilder childBuilder = getGroupChild(builder);
+        for (int i = 0; i < recordType.getFieldNames().length; i++) {
+            String childFieldName = recordType.getFieldNames()[i];
+            IAType childType = recordType.getFieldType(childFieldName);
+
+            childType.accept(this, new Pair<>(childBuilder, childFieldName));
+
+        }
+        childBuilder.named(fieldName);
+
+        return null;
+    }
+
+    @Override
+    public Void visit(AbstractCollectionType collectionType, Pair<Types.Builder, String> arg) {
+        Types.Builder builder = arg.first;
+        String fieldName = arg.second;
+
+        Types.BaseListBuilder childBuilder = getListChild(builder);
+        IAType child = collectionType.getItemType();
+        child.accept(this, new Pair<>(childBuilder, fieldName));
+
+        return null;
+    }
+
+    @Override
+    public Void visit(AUnionType unionType, Pair<Types.Builder, String> arg) {
+        // Union (optional) types are expected to be unwrapped by the caller, so this should be unreachable.
+        return null;
+    }
+
+    @Override
+    public Void visitFlat(IAType flatType, Pair<Types.Builder, String> arg) {
+        Types.Builder builder = arg.first;
+        String fieldName = arg.second;
+
+        PrimitiveType.PrimitiveTypeName primitiveTypeName =
+                AsterixParquetTypeMap.PRIMITIVE_TYPE_NAME_MAP.get(flatType.getTypeTag());
+
+        if (primitiveTypeName == null) {
+            unsupportedType = flatType.getTypeTag();
+            // Bail out: there is no Parquet mapping for this type, and passing a null primitive
+            // type to the builder would fail before the caller can report the unsupported type.
+            return null;
+        }
+
+        LogicalTypeAnnotation logicalTypeAnnotation =
+                AsterixParquetTypeMap.LOGICAL_TYPE_ANNOTATION_MAP.get(flatType.getTypeTag());
+
+        getPrimitiveChild(builder, primitiveTypeName, logicalTypeAnnotation).named(fieldName);
+
+        return null;
+    }
+
+}
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
index 189c842..f6da684 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
@@ -45,8 +45,7 @@
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.junit.Assert;
 import org.junit.Test;
-
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.locationtech.jts.geom.Point;
 
 public class ADMDataParserTest {
 
@@ -192,10 +191,10 @@
                     SerializerDeserializerProvider.INSTANCE.getNonTaggedSerializerDeserializer(BuiltinType.AGEOMETRY);
             Object point = geomDeser.deserialize(dataIn);
             Assert.assertTrue(point instanceof AGeometry);
-            Assert.assertTrue(((AGeometry) point).getGeometry() instanceof OGCPoint);
-            OGCPoint p = (OGCPoint) ((AGeometry) point).getGeometry();
-            Assert.assertEquals(3.0, p.X(), 1E-5);
-            Assert.assertEquals(4.0, p.Y(), 1E-5);
+            Assert.assertTrue(((AGeometry) point).getGeometry() instanceof Point);
+            Point p = (Point) ((AGeometry) point).getGeometry();
+            Assert.assertEquals(3.0, p.getX(), 1E-5);
+            Assert.assertEquals(4.0, p.getY(), 1E-5);
         } catch (IOException e) {
             e.printStackTrace();
             Assert.fail("Error in parsing");
diff --git a/asterixdb/asterix-geo/pom.xml b/asterixdb/asterix-geo/pom.xml
index e4cd8ff..7568d5d 100644
--- a/asterixdb/asterix-geo/pom.xml
+++ b/asterixdb/asterix-geo/pom.xml
@@ -121,8 +121,8 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>com.esri.geometry</groupId>
-      <artifactId>esri-geometry-api</artifactId>
+      <groupId>org.locationtech.jts</groupId>
+      <artifactId>jts-core</artifactId>
     </dependency>
   </dependencies>
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/aggregates/STUnionAggregateFunction.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/aggregates/STUnionAggregateFunction.java
index 4676851..5d64e9d 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/aggregates/STUnionAggregateFunction.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/aggregates/STUnionAggregateFunction.java
@@ -42,11 +42,9 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
 
 /**
  * STUnion aggregates a set of objects into one object. If the input is a set of overlapping polygons, their union is
@@ -55,14 +53,13 @@
  * output is a GeometryCollection.
  */
 public class STUnionAggregateFunction extends AbstractAggregateFunction {
-    /**Use WGS 84 (EPSG:4326) as the default coordinate reference system*/
-    public static final SpatialReference DEFAULT_CRS = SpatialReference.create(4326);
     @SuppressWarnings("unchecked")
     private ISerializerDeserializer<AGeometry> geometrySerde =
             SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AGEOMETRY);
     private IPointable inputVal = new VoidPointable();
     private IScalarEvaluator eval;
-    protected OGCGeometry geometry;
+    protected Geometry geometry;
+    protected GeometryFactory geometryFactory;
 
     private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
 
@@ -75,7 +72,8 @@
     @Override
     public void init() throws HyracksDataException {
         // Initialize the resulting geometry with an empty point.
-        geometry = new OGCPoint(new Point(), DEFAULT_CRS);
+        geometryFactory = new GeometryFactory();
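+        // createPoint((Coordinate) null) yields an empty point, which acts as the identity for union.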
+        geometry = geometryFactory.createPoint((Coordinate) null);
     }
 
     @Override
@@ -91,7 +89,7 @@
             processNull();
         } else if (typeTag == ATypeTag.GEOMETRY) {
             DataInput dataIn = new DataInputStream(new ByteArrayInputStream(data, offset + 1, len - 1));
-            OGCGeometry geometry1 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
+            Geometry geometry1 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
             geometry = geometry.union(geometry1);
         }
     }
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTDoubleGeometryDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTDoubleGeometryDescriptor.java
index 97c30a1..8a85751 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTDoubleGeometryDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTDoubleGeometryDescriptor.java
@@ -26,7 +26,6 @@
 import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
 import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
 import org.apache.asterix.om.base.ABoolean;
-import org.apache.asterix.om.base.AGeometry;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.EnumDeserializer;
@@ -40,13 +39,11 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public abstract class AbstractSTDoubleGeometryDescriptor extends AbstractScalarFunctionDynamicDescriptor {
 
-    abstract protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1)
-            throws HyracksDataException;
+    abstract protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException;
 
     @Override
     public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
@@ -106,13 +103,13 @@
                 }
 
                 DataInputStream dataIn0 = new DataInputStream(new ByteArrayInputStream(bytes0, offset0 + 1, len0 - 1));
-                OGCGeometry geometry0 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn0).getGeometry();
+                Geometry geometry0 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn0).getGeometry();
                 DataInputStream dataIn1 = new DataInputStream(new ByteArrayInputStream(bytes1, offset1 + 1, len1 - 1));
-                OGCGeometry geometry1 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn1).getGeometry();
+                Geometry geometry1 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn1).getGeometry();
                 Object finalResult = evaluateOGCGeometry(geometry0, geometry1);
-                if (finalResult instanceof OGCGeometry) {
+                if (finalResult instanceof Geometry) {
                     out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
-                    AGeometrySerializerDeserializer.INSTANCE.serialize(new AGeometry((OGCGeometry) finalResult), out);
+                    AGeometrySerializerDeserializer.INSTANCE.serialize((Geometry) finalResult, out);
                 } else if (finalResult instanceof Boolean) {
                     SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN)
                             .serialize((boolean) finalResult ? ABoolean.TRUE : ABoolean.FALSE, out);
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTGeometryNDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTGeometryNDescriptor.java
index 1420214..5d281ec 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTGeometryNDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTGeometryNDescriptor.java
@@ -25,7 +25,6 @@
 
 import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
 import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
-import org.apache.asterix.om.base.AGeometry;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
 import org.apache.asterix.runtime.exceptions.InvalidDataFormatException;
@@ -38,14 +37,13 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public abstract class AbstractSTGeometryNDescriptor extends AbstractScalarFunctionDynamicDescriptor {
 
     private static final long serialVersionUID = 1L;
 
-    abstract protected OGCGeometry evaluateOGCGeometry(OGCGeometry geometry, int n) throws HyracksDataException;
+    abstract protected Geometry evaluateOGCGeometry(Geometry geometry, int n) throws HyracksDataException;
 
     @Override
     public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
@@ -101,13 +99,13 @@
 
             ByteArrayInputStream inStream = new ByteArrayInputStream(data, offset + 1, len - 1);
             DataInputStream dataIn = new DataInputStream(inStream);
-            OGCGeometry geometry = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
+            Geometry geometry = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
             int n = (int) AInt64SerializerDeserializer.getLong(data0, offset0 + 1);
 
-            OGCGeometry geometryN = evaluateOGCGeometry(geometry, n);
+            Geometry geometryN = evaluateOGCGeometry(geometry, n);
             try {
                 out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
-                AGeometrySerializerDeserializer.INSTANCE.serialize(new AGeometry(geometryN), out);
+                AGeometrySerializerDeserializer.INSTANCE.serialize(geometryN, out);
                 result.set(resultStorage);
             } catch (IOException e) {
                 throw new InvalidDataFormatException(sourceLoc, getIdentifier(), e,
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTSingleGeometryDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTSingleGeometryDescriptor.java
index e41aebb..e5d9e38 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTSingleGeometryDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTSingleGeometryDescriptor.java
@@ -27,7 +27,6 @@
 import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
 import org.apache.asterix.om.base.ABinary;
 import org.apache.asterix.om.base.ABoolean;
-import org.apache.asterix.om.base.AGeometry;
 import org.apache.asterix.om.base.AMutableInt32;
 import org.apache.asterix.om.base.ARectangle;
 import org.apache.asterix.om.types.ATypeTag;
@@ -44,12 +43,11 @@
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
 import org.apache.hyracks.util.string.UTF8StringUtil;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public abstract class AbstractSTSingleGeometryDescriptor extends AbstractScalarFunctionDynamicDescriptor {
 
-    abstract protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException;
+    abstract protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException;
 
     @Override
     public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
@@ -99,7 +97,7 @@
                 }
 
                 DataInputStream dataIn0 = new DataInputStream(new ByteArrayInputStream(bytes0, offset0 + 1, len0 - 1));
-                OGCGeometry geometry0 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn0).getGeometry();
+                Geometry geometry0 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn0).getGeometry();
 
                 Object finalResult = evaluateOGCGeometry(geometry0);
                 if (finalResult == null) {
@@ -120,9 +118,9 @@
                 } else if (finalResult instanceof byte[]) {
                     SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABINARY)
                             .serialize(new ABinary((byte[]) finalResult), out);
-                } else if (finalResult instanceof OGCGeometry) {
+                } else if (finalResult instanceof Geometry) {
                     out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
-                    AGeometrySerializerDeserializer.INSTANCE.serialize(new AGeometry((OGCGeometry) finalResult), out);
+                    AGeometrySerializerDeserializer.INSTANCE.serialize((Geometry) finalResult, out);
                 } else if (finalResult instanceof ARectangle) {
                     SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ARECTANGLE)
                             .serialize(finalResult, out);
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/ParseGeoJSONDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/ParseGeoJSONDescriptor.java
index 425a198..8df57e4 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/ParseGeoJSONDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/ParseGeoJSONDescriptor.java
@@ -23,9 +23,10 @@
 import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 
+import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
 import org.apache.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.JtsModule;
 import org.apache.asterix.geo.evaluators.GeoFunctionTypeInferers;
 import org.apache.asterix.om.base.AOrderedList;
 import org.apache.asterix.om.base.ARecord;
@@ -48,10 +49,9 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+import org.locationtech.jts.geom.Geometry;
 
-import com.esri.core.geometry.MapOGCStructure;
-import com.esri.core.geometry.OperatorImportFromGeoJson;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 public class ParseGeoJSONDescriptor extends AbstractScalarFunctionDynamicDescriptor {
 
@@ -98,7 +98,7 @@
         private DataOutput out;
         private IPointable inputArg;
         private IScalarEvaluator eval;
-        private OperatorImportFromGeoJson geoJsonImporter;
+        private ObjectMapper objectMapper;
 
         public ParseGeoJSONEvaluator(IScalarEvaluatorFactory factory, IEvaluatorContext ctx)
                 throws HyracksDataException {
@@ -106,7 +106,8 @@
             out = resultStorage.getDataOutput();
             inputArg = new VoidPointable();
             eval = factory.createScalarEvaluator(ctx);
-            geoJsonImporter = OperatorImportFromGeoJson.local();
+            objectMapper = new ObjectMapper();
+            objectMapper.registerModule(new JtsModule());
         }
 
         @Override
@@ -123,15 +124,10 @@
             ByteArrayInputStream inStream = new ByteArrayInputStream(data, offset + 1, len - 1);
             DataInput dataIn = new DataInputStream(inStream);
             try {
-                String geometry = recordToString(new ARecordSerializerDeserializer(recType).deserialize(dataIn));
-                MapOGCStructure structure = geoJsonImporter.executeOGC(0, geometry, null);
-                OGCGeometry ogcGeometry =
-                        OGCGeometry.createFromOGCStructure(structure.m_ogcStructure, structure.m_spatialReference);
-                ByteBuffer buffer = ogcGeometry.asBinary();
-                byte[] wKBGeometryBuffer = buffer.array();
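+                // With the JTS module registered, Jackson maps the GeoJSON text directly to a
+                // JTS Geometry, which is then stored in AsterixDB's internal geometry format.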
+                String geometryGeoJSON = recordToString(new ARecordSerializerDeserializer(recType).deserialize(dataIn));
+                Geometry geometry = objectMapper.readValue(geometryGeoJSON, Geometry.class);
                 out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
-                out.writeInt(wKBGeometryBuffer.length);
-                out.write(wKBGeometryBuffer);
+                AGeometrySerializerDeserializer.INSTANCE.serialize(geometry, out);
                 result.set(resultStorage);
             } catch (IOException e) {
                 throw new InvalidDataFormatException(sourceLoc, getIdentifier(), e,
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAreaDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAreaDescriptor.java
index a8d983c..c10bbf2 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAreaDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAreaDescriptor.java
@@ -21,12 +21,10 @@
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptor;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.GeometryCursor;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STAreaDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -39,18 +37,15 @@
     };
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        double area;
-        if (!"GeometryCollection".equals(geometry.geometryType())) {
-            area = geometry.getEsriGeometry().calculateArea2D();
-        } else {
-            GeometryCursor cursor = geometry.getEsriGeometryCursor();
-            Geometry geometry1 = cursor.next();
-            area = 0;
-            while (geometry1 != null) {
-                area += geometry1.calculateArea2D();
-                geometry1 = cursor.next();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        double area = 0;
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_GEOMETRYCOLLECTION)) {
+            for (int i = 0; i < geometry.getNumGeometries(); i++) {
+                Geometry subGeom = geometry.getGeometryN(i);
+                area += subGeom.getArea();
             }
+        } else {
+            area = geometry.getArea();
         }
         return area;
     }
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsBinaryDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsBinaryDescriptor.java
index e0e43bb..9ee77c1 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsBinaryDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsBinaryDescriptor.java
@@ -18,12 +18,13 @@
  */
 package org.apache.asterix.geo.evaluators.functions;
 
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoFunctionUtils;
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.WKBWriter;
 
 public class STAsBinaryDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,8 +32,10 @@
     public static final IFunctionDescriptorFactory FACTORY = STAsBinaryDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        return geometry.asBinary().array();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        WKBWriter wkbWriter = new WKBWriter(GeoFunctionUtils.getCoordinateDimension(geometry),
+                GeoFunctionUtils.LITTLE_ENDIAN_BYTEORDER);
+        return wkbWriter.write(geometry);
     }
 
     @Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsGeoJSONDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsGeoJSONDescriptor.java
index 760261d..f5d2217 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsGeoJSONDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsGeoJSONDescriptor.java
@@ -18,21 +18,41 @@
  */
 package org.apache.asterix.geo.evaluators.functions;
 
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.JtsModule;
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.locationtech.jts.geom.Geometry;
 
-import com.esri.core.geometry.ogc.OGCGeometry;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 public class STAsGeoJSONDescriptor extends AbstractSTSingleGeometryDescriptor {
 
     private static final long serialVersionUID = 1L;
     public static final IFunctionDescriptorFactory FACTORY = STAsGeoJSONDescriptor::new;
+    private static final Logger LOGGER = LogManager.getLogger();
+    private final ObjectMapper mapper;
+
+    public STAsGeoJSONDescriptor() {
+        mapper = new ObjectMapper();
+        // Register the JTS serializers once at construction time rather than on every evaluation.
+        mapper.registerModule(new JtsModule());
+    }
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        return geometry.asGeoJson();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        try {
+            return mapper.writeValueAsString(geometry);
+        } catch (JsonProcessingException e) {
+            LOGGER.debug("JSON processing exception during STAsGeoJSON function", e);
+            throw HyracksDataException.create(ErrorCode.PARSING_ERROR);
+        }
     }
 
     @Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsTextDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsTextDescriptor.java
index 12bdb30..372c73e 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsTextDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsTextDescriptor.java
@@ -18,12 +18,13 @@
  */
 package org.apache.asterix.geo.evaluators.functions;
 
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoFunctionUtils;
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.WKTWriter;
 
 public class STAsTextDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,8 +32,13 @@
     public static final IFunctionDescriptorFactory FACTORY = STAsTextDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        return geometry.asText();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        try {
+            WKTWriter wktWriter = new WKTWriter(GeoFunctionUtils.getCoordinateDimension(geometry));
+            return wktWriter.write(geometry);
+        } catch (Exception e) {
+            throw new HyracksDataException("Failed to convert geometry to WKT: " + e.getMessage(), e);
+        }
     }
 
     @Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STBoundaryDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STBoundaryDescriptor.java
index 9bd6ba1..5d6abaf 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STBoundaryDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STBoundaryDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STBoundaryDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,8 +30,8 @@
     public static final IFunctionDescriptorFactory FACTORY = STBoundaryDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        return geometry.boundary();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        return geometry.getBoundary();
     }
 
     @Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STContainsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STContainsDescriptor.java
index 34c21bf..e0461ff 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STContainsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STContainsDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STContainsDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STContainsDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.contains(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCoordDimDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCoordDimDescriptor.java
index a0c10f7..da8ee66 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCoordDimDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCoordDimDescriptor.java
@@ -18,12 +18,12 @@
  */
 package org.apache.asterix.geo.evaluators.functions;
 
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoFunctionUtils;
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STCoordDimDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,8 +31,8 @@
     public static final IFunctionDescriptorFactory FACTORY = STCoordDimDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        return geometry.coordinateDimension();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        return GeoFunctionUtils.getCoordinateDimension(geometry);
     }
 
     @Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCrossesDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCrossesDescriptor.java
index c276e7e..f868354 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCrossesDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCrossesDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STCrossesDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STCrossesDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.crosses(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDifferenceDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDifferenceDescriptor.java
index 8a49d4b..3ea165f 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDifferenceDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDifferenceDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STDifferenceDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STDifferenceDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.difference(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDimensionDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDimensionDescriptor.java
index 34d7282..38683a7 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDimensionDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDimensionDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STDimensionDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,8 +30,8 @@
     public static final IFunctionDescriptorFactory FACTORY = STDimensionDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        return geometry.dimension();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        return geometry.getDimension();
     }
 
     @Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDisjointDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDisjointDescriptor.java
index 0a890b9..5317f49 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDisjointDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDisjointDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STDisjointDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STDisjointDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.disjoint(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDistanceDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDistanceDescriptor.java
index 75b8b41..b68a6af 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDistanceDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDistanceDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STDistanceDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STDistanceDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.distance(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEndPointDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEndPointDescriptor.java
index ab0ba5c..36bc258 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEndPointDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEndPointDescriptor.java
@@ -20,11 +20,11 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCCurve;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.LineString;
 
 public class STEndPointDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -32,12 +32,12 @@
     public static final IFunctionDescriptorFactory FACTORY = STEndPointDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCCurve) {
-            return ((OGCCurve) geometry).endPoint();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_LINESTRING)) {
+            return ((LineString) geometry).getEndPoint();
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 
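Note on the pattern used throughout this patch: ESRI's instanceof-based type
checks (OGCCurve, OGCPolygon, ...) become comparisons of
Geometry.getGeometryType() against the JTS Geometry.TYPENAME_* constants,
via the null-safe StringUtils.equals from Commons Lang. A minimal standalone
sketch, not part of the patch (the class name is invented; plain equals
stands in for StringUtils.equals):

    import org.locationtech.jts.geom.Geometry;
    import org.locationtech.jts.geom.LineString;
    import org.locationtech.jts.io.WKTReader;

    class EndPointSketch {
        public static void main(String[] args) throws Exception {
            Geometry g = new WKTReader().read("LINESTRING (0 0, 1 1, 2 2)");
            // Dispatch on the type name instead of instanceof OGCCurve.
            if (Geometry.TYPENAME_LINESTRING.equals(g.getGeometryType())) {
                System.out.println(((LineString) g).getEndPoint()); // POINT (2 2)
            }
        }
    }
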
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEnvelopeDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEnvelopeDescriptor.java
index 13555c6..2ca68fd 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEnvelopeDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEnvelopeDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STEnvelopeDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,8 +30,8 @@
     public static final IFunctionDescriptorFactory FACTORY = STEnvelopeDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        return geometry.envelope();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        return geometry.getEnvelope();
     }
 
     @Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEqualsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEqualsDescriptor.java
index e0c0cf8..c40ee62 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEqualsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEqualsDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STEqualsDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STEqualsDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.equals(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STExteriorRingDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STExteriorRingDescriptor.java
index 5888b63..71cb2b7 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STExteriorRingDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STExteriorRingDescriptor.java
@@ -20,11 +20,11 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPolygon;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Polygon;
 
 public class STExteriorRingDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -32,12 +32,12 @@
     public static final IFunctionDescriptorFactory FACTORY = STExteriorRingDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCPolygon) {
-            return ((OGCPolygon) geometry).exteriorRing();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POLYGON)) {
+            return ((Polygon) geometry).getExteriorRing();
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextDescriptor.java
index 73272ae..fc097ce 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextDescriptor.java
@@ -22,8 +22,8 @@
 import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 
+import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
 import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
@@ -40,12 +40,9 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.OGCStructure;
-import com.esri.core.geometry.OperatorImportFromWkt;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.WktImportFlags;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKTReader;
 
 public class STGeomFromTextDescriptor extends AbstractScalarFunctionDynamicDescriptor {
 
@@ -77,7 +74,6 @@
         private DataOutput out;
         private IPointable inputArg;
         private IScalarEvaluator eval;
-        private OperatorImportFromWkt wktImporter;
 
         public STGeomFromTextEvaluator(IScalarEvaluatorFactory[] args, IEvaluatorContext ctx)
                 throws HyracksDataException {
@@ -85,7 +81,6 @@
             out = resultStorage.getDataOutput();
             inputArg = new VoidPointable();
             eval = args[0].createScalarEvaluator(ctx);
-            wktImporter = OperatorImportFromWkt.local();
         }
 
         @Override
@@ -102,19 +97,13 @@
             ByteArrayInputStream inStream = new ByteArrayInputStream(data, offset + 1, len - 1);
             DataInputStream dataIn = new DataInputStream(inStream);
             try {
-                String geometry = AStringSerializerDeserializer.INSTANCE.deserialize(dataIn).getStringValue();
-                OGCStructure structure;
-
-                structure = wktImporter.executeOGC(WktImportFlags.wktImportNonTrusted, geometry, null);
-                OGCGeometry ogcGeometry = OGCGeometry.createFromOGCStructure(structure, SpatialReference.create(4326));
-                ByteBuffer buffer = ogcGeometry.asBinary();
-                byte[] wKBGeometryBuffer = buffer.array();
+                WKTReader reader = new WKTReader();
+                String wktString = AStringSerializerDeserializer.INSTANCE.deserialize(dataIn).getStringValue();
+                Geometry geometry = reader.read(wktString);
                 out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
-                out.writeInt(wKBGeometryBuffer.length);
-                out.write(wKBGeometryBuffer);
+                AGeometrySerializerDeserializer.INSTANCE.serialize(geometry, out);
                 result.set(resultStorage);
-
-            } catch (IOException e) {
+            } catch (ParseException | IOException e) {
                 throw new InvalidDataFormatException(sourceLoc, getIdentifier(), e,
                         ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
             }
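
The WKT import path above shrinks considerably: OperatorImportFromWkt plus
the OGCStructure/WKB round-trip is replaced by a single WKTReader.read call,
with ParseException folded into the existing InvalidDataFormatException
handling. An illustrative sketch under the same JTS dependency (class name
invented):

    import org.locationtech.jts.geom.Geometry;
    import org.locationtech.jts.io.ParseException;
    import org.locationtech.jts.io.WKTReader;

    class WktParseSketch {
        public static void main(String[] args) throws ParseException {
            // Malformed WKT raises ParseException, which the evaluator above
            // maps (together with IOException) to InvalidDataFormatException.
            Geometry g = new WKTReader().read("POLYGON ((0 0, 10 0, 10 10, 0 10, 0 0))");
            System.out.println(g.getGeometryType()); // Polygon
        }
    }
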
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextSRIDDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextSRIDDescriptor.java
index aca726e..3a3dddb 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextSRIDDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextSRIDDescriptor.java
@@ -22,9 +22,8 @@
 import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 
-import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
 import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
@@ -41,12 +40,9 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.OGCStructure;
-import com.esri.core.geometry.OperatorImportFromWkt;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.WktImportFlags;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKTReader;
 
 public class STGeomFromTextSRIDDescriptor extends AbstractScalarFunctionDynamicDescriptor {
 
@@ -80,7 +76,6 @@
         private IScalarEvaluator eval;
         private IPointable inputArg0;
         private IScalarEvaluator eval0;
-        private OperatorImportFromWkt wktImporter;
 
         public STGeomFromTextSRIDEvaluator(IScalarEvaluatorFactory[] args, IEvaluatorContext ctx)
                 throws HyracksDataException {
@@ -90,7 +85,6 @@
             eval = args[0].createScalarEvaluator(ctx);
             inputArg0 = new VoidPointable();
             eval0 = args[1].createScalarEvaluator(ctx);
-            wktImporter = OperatorImportFromWkt.local();
         }
 
         @Override
@@ -116,20 +110,13 @@
             ByteArrayInputStream inStream = new ByteArrayInputStream(data, offset + 1, len - 1);
             DataInputStream dataIn = new DataInputStream(inStream);
             try {
-                String geometry = AStringSerializerDeserializer.INSTANCE.deserialize(dataIn).getStringValue();
-                int srid = (int) AInt64SerializerDeserializer.getLong(data0, offset0 + 1);
-                OGCStructure structure;
-
-                structure = wktImporter.executeOGC(WktImportFlags.wktImportNonTrusted, geometry, null);
-                OGCGeometry ogcGeometry = OGCGeometry.createFromOGCStructure(structure, SpatialReference.create(srid));
-                ByteBuffer buffer = ogcGeometry.asBinary();
-                byte[] wKBGeometryBuffer = buffer.array();
+                WKTReader reader = new WKTReader();
+                String wktString = AStringSerializerDeserializer.INSTANCE.deserialize(dataIn).getStringValue();
+                Geometry geometry = reader.read(wktString);
                 out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
-                out.writeInt(wKBGeometryBuffer.length);
-                out.write(wKBGeometryBuffer);
+                AGeometrySerializerDeserializer.INSTANCE.serialize(geometry, out);
                 result.set(resultStorage);
-
-            } catch (IOException e) {
+            } catch (ParseException | IOException e) {
                 throw new InvalidDataFormatException(sourceLoc, getIdentifier(), e,
                         ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
             }
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomentryTypeDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomentryTypeDescriptor.java
index 8a432d1..96e5931 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomentryTypeDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomentryTypeDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STGeomentryTypeDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,8 +30,8 @@
     public static final IFunctionDescriptorFactory FACTORY = STGeomentryTypeDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        return geometry.geometryType();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        return geometry.getGeometryType();
     }
 
     @Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeometryNDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeometryNDescriptor.java
index 5d9642e..4c8cc9a 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeometryNDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeometryNDescriptor.java
@@ -20,11 +20,10 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCGeometryCollection;
+import org.locationtech.jts.geom.Geometry;
 
 public class STGeometryNDescriptor extends AbstractSTGeometryNDescriptor {
 
@@ -38,12 +37,12 @@
     }
 
     @Override
-    protected OGCGeometry evaluateOGCGeometry(OGCGeometry geometry, int n) throws HyracksDataException {
-        if (geometry instanceof OGCGeometryCollection) {
-            return ((OGCGeometryCollection) geometry).geometryN(n);
+    protected Geometry evaluateOGCGeometry(Geometry geometry, int n) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_GEOMETRYCOLLECTION)) {
+            return geometry.getGeometryN(n);
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 }
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STInteriorRingNDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STInteriorRingNDescriptor.java
index 5f6d956..9dc8bd9 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STInteriorRingNDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STInteriorRingNDescriptor.java
@@ -20,11 +20,11 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPolygon;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Polygon;
 
 public class STInteriorRingNDescriptor extends AbstractSTGeometryNDescriptor {
 
@@ -38,12 +38,12 @@
     }
 
     @Override
-    protected OGCGeometry evaluateOGCGeometry(OGCGeometry geometry, int n) throws HyracksDataException {
-        if (geometry instanceof OGCPolygon) {
-            return ((OGCPolygon) geometry).interiorRingN(n);
+    protected Geometry evaluateOGCGeometry(Geometry geometry, int n) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POLYGON)) {
+            return ((Polygon) geometry).getInteriorRingN(n);
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 }
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectionDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectionDescriptor.java
index 5a3d2bf..6888b2b 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectionDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectionDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STIntersectionDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STIntersectionDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.intersection(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectsDescriptor.java
index 34a1407..a8a4b78 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectsDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STIntersectsDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STIntersectsDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.intersects(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsClosedDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsClosedDescriptor.java
index ff55f9f..cedda05 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsClosedDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsClosedDescriptor.java
@@ -22,15 +22,10 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCCurve;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCGeometryCollection;
-import com.esri.core.geometry.ogc.OGCMultiCurve;
-import com.esri.core.geometry.ogc.OGCMultiPoint;
-import com.esri.core.geometry.ogc.OGCMultiPolygon;
-import com.esri.core.geometry.ogc.OGCPoint;
-import com.esri.core.geometry.ogc.OGCPolygon;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryCollection;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.MultiLineString;
 
 public class STIsClosedDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -38,30 +33,36 @@
     public static final IFunctionDescriptorFactory FACTORY = STIsClosedDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
         return isClosed(geometry);
     }
 
-    private boolean isClosed(OGCGeometry geometry) {
-        if (geometry instanceof OGCCurve) {
-            return ((OGCCurve) geometry).isClosed();
-        } else if (geometry instanceof OGCMultiCurve) {
-            return ((OGCMultiCurve) geometry).isClosed();
-        } else if (geometry instanceof OGCMultiPoint || geometry instanceof OGCMultiPolygon
-                || geometry instanceof OGCPolygon || geometry instanceof OGCPoint) {
-            return true;
-        } else if (geometry instanceof OGCGeometryCollection) {
-            OGCGeometryCollection geometryCollection = (OGCGeometryCollection) geometry;
-            int num = geometryCollection.numGeometries();
-            for (int i = 0; i < num; ++i) {
-                if (!isClosed(geometryCollection.geometryN(i))) {
-                    return false;
+    private boolean isClosed(Geometry geometry) {
+        switch (geometry.getGeometryType()) {
+            case Geometry.TYPENAME_LINESTRING:
+                return ((LineString) geometry).isClosed();
+
+            case Geometry.TYPENAME_MULTILINESTRING:
+                return ((MultiLineString) geometry).isClosed();
+
+            case Geometry.TYPENAME_MULTIPOINT:
+            case Geometry.TYPENAME_MULTIPOLYGON:
+            case Geometry.TYPENAME_POLYGON:
+            case Geometry.TYPENAME_POINT:
+                return true;
+
+            case Geometry.TYPENAME_GEOMETRYCOLLECTION:
+                GeometryCollection geometryCollection = (GeometryCollection) geometry;
+                int num = geometryCollection.getNumGeometries();
+                for (int i = 0; i < num; ++i) {
+                    if (!isClosed(geometryCollection.getGeometryN(i))) {
+                        return false;
+                    }
                 }
-            }
-            return true;
-        } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+                return true;
+
+            default:
+                throw new UnsupportedOperationException("Unsupported geometry type: " + geometry.getGeometryType());
         }
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsCollectionDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsCollectionDescriptor.java
index 544fdf3..f6a8578 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsCollectionDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsCollectionDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STIsCollectionDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,8 +30,8 @@
     public static final IFunctionDescriptorFactory FACTORY = STIsCollectionDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        String type = geometry.geometryType();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        String type = geometry.getGeometryType();
         return "GeometryCollection".equals(type) || "MultiLineString".equals(type) || "MultiPoint".equals(type)
                 || "MultiPolygon".equals(type);
     }
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsEmptyDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsEmptyDescriptor.java
index 1adcb0e..569806c 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsEmptyDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsEmptyDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STIsEmptyDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STIsEmptyDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
         return geometry.isEmpty();
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsRingDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsRingDescriptor.java
index 02ea049a..0c84c1c 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsRingDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsRingDescriptor.java
@@ -20,11 +20,11 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCCurve;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.LineString;
 
 public class STIsRingDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -32,12 +32,12 @@
     public static final IFunctionDescriptorFactory FACTORY = STIsRingDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCCurve) {
-            return ((OGCCurve) geometry).isRing();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_LINESTRING)) {
+            return ((LineString) geometry).isRing();
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
 
         }
     }
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsSimpleDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsSimpleDescriptor.java
index b8b2d63..2b6c468 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsSimpleDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsSimpleDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STIsSimpleDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STIsSimpleDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
         return geometry.isSimple();
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLengthDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLengthDescriptor.java
index 7357813..8909ed3 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLengthDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLengthDescriptor.java
@@ -20,14 +20,12 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.GeometryCursor;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCLineString;
-import com.esri.core.geometry.ogc.OGCMultiLineString;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.MultiLineString;
 
 public class STLengthDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -35,21 +33,20 @@
     public static final IFunctionDescriptorFactory FACTORY = STLengthDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCLineString) {
-            return geometry.getEsriGeometry().calculateLength2D();
-        } else if (geometry instanceof OGCMultiLineString) {
-            GeometryCursor cursor = geometry.getEsriGeometryCursor();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_LINESTRING)) {
+            return geometry.getLength();
+        } else if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_MULTILINESTRING)) {
             double length = 0;
-            Geometry geometry1 = cursor.next();
-            while (geometry1 != null) {
-                length += geometry1.calculateLength2D();
-                geometry1 = cursor.next();
+            MultiLineString multiLine = (MultiLineString) geometry;
+            for (int i = 0; i < multiLine.getNumGeometries(); i++) {
+                LineString lineString = (LineString) multiLine.getGeometryN(i);
+                length += lineString.getLength();
             }
             return length;
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 
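Worth noting: JTS Geometry.getLength() on a MultiLineString already returns
the sum of the component lengths, so the explicit loop above is equivalent to
calling multiLine.getLength() directly; the loop simply makes the summation
explicit. A quick sketch (class name invented):

    import org.locationtech.jts.geom.MultiLineString;
    import org.locationtech.jts.io.WKTReader;

    class LengthSketch {
        public static void main(String[] args) throws Exception {
            MultiLineString mls = (MultiLineString) new WKTReader()
                    .read("MULTILINESTRING ((0 0, 3 0), (0 1, 0 5))");
            // getLength() sums the component line strings: 3.0 + 4.0
            System.out.println(mls.getLength()); // 7.0
        }
    }
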
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLineFromMultiPointDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLineFromMultiPointDescriptor.java
index b716b5c..82e1c3d 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLineFromMultiPointDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLineFromMultiPointDescriptor.java
@@ -20,35 +20,42 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.Polyline;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCLineString;
-import com.esri.core.geometry.ogc.OGCMultiPoint;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.MultiPoint;
 
 public class STLineFromMultiPointDescriptor extends AbstractSTSingleGeometryDescriptor {
 
     private static final long serialVersionUID = 1L;
     public static final IFunctionDescriptorFactory FACTORY = STLineFromMultiPointDescriptor::new;
+    private final GeometryFactory geometryFactory;
+
+    public STLineFromMultiPointDescriptor() {
+        geometryFactory = new GeometryFactory();
+    }
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCMultiPoint) {
-            Polyline polyline = new Polyline();
-            OGCMultiPoint multiPoint = (OGCMultiPoint) geometry;
-            int numPoints = multiPoint.numGeometries();
-            polyline.startPath((Point) multiPoint.geometryN(0).getEsriGeometry());
-            for (int i = 1; i < numPoints; i++) {
-                polyline.lineTo((Point) multiPoint.geometryN(i).getEsriGeometry());
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_MULTIPOINT)) {
+            MultiPoint multiPoint = (MultiPoint) geometry;
+            int numPoints = multiPoint.getNumGeometries();
+
+            if (numPoints < 2) {
+                throw new UnsupportedOperationException("A LineString requires at least two points.");
             }
-            return new OGCLineString(polyline, 0, SpatialReference.create(4326));
+
+            Coordinate[] coordinates = new Coordinate[numPoints];
+            for (int i = 0; i < numPoints; i++) {
+                coordinates[i] = multiPoint.getGeometryN(i).getCoordinate();
+            }
+            return geometryFactory.createLineString(coordinates);
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 
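The numPoints < 2 guard matters because JTS itself rejects one-point line
strings: GeometryFactory.createLineString accepts either zero or at least two
coordinates and otherwise throws IllegalArgumentException; the guard simply
surfaces a clearer message first. Sketch (class name invented):

    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.GeometryFactory;
    import org.locationtech.jts.geom.LineString;

    class LineFromPointsSketch {
        public static void main(String[] args) {
            GeometryFactory gf = new GeometryFactory();
            // A single-element Coordinate[] here would throw
            // IllegalArgumentException inside JTS.
            LineString line = gf.createLineString(new Coordinate[] {
                    new Coordinate(0, 0), new Coordinate(1, 1), new Coordinate(2, 0) });
            System.out.println(line); // LINESTRING (0 0, 1 1, 2 0)
        }
    }
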
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBRDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBRDescriptor.java
index 55e358c..6353918 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBRDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBRDescriptor.java
@@ -25,9 +25,8 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
 
 public class STMBRDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -41,14 +40,12 @@
     };
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
         AMutableRectangle aRectangle = new AMutableRectangle(null, null);
         AMutablePoint[] aPoint = { new AMutablePoint(0, 0), new AMutablePoint(0, 0) };
-        Envelope env = new Envelope();
-        geometry.getEsriGeometry().queryEnvelope(env);
-        aPoint[0].setValue(env.getXMin(), env.getYMin());
-        aPoint[1].setValue(env.getXMax(), env.getYMax());
+        Envelope envelope = geometry.getEnvelopeInternal();
+        aPoint[0].setValue(envelope.getMinX(), envelope.getMinY());
+        aPoint[1].setValue(envelope.getMaxX(), envelope.getMaxY());
         aRectangle.setValue(aPoint[0], aPoint[1]);
         return aRectangle;
     }
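
Here getEnvelopeInternal() replaces the ESRI queryEnvelope call: it returns a
lightweight Envelope holding only the bounds, unlike getEnvelope() (used by
STEnvelopeDescriptor above), which materializes an actual Geometry. Sketch
(class name invented):

    import org.locationtech.jts.geom.Envelope;
    import org.locationtech.jts.io.WKTReader;

    class MbrSketch {
        public static void main(String[] args) throws Exception {
            Envelope env = new WKTReader().read("LINESTRING (2 3, 8 1)").getEnvelopeInternal();
            // The (min, min) and (max, max) corners feed the two
            // AMutablePoints of the rectangle above.
            System.out.println(env.getMinX() + " " + env.getMinY()); // 2.0 1.0
            System.out.println(env.getMaxX() + " " + env.getMaxY()); // 8.0 3.0
        }
    }
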
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBREnlargeDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBREnlargeDescriptor.java
index b3bfb33..50ddd28 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBREnlargeDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBREnlargeDescriptor.java
@@ -46,9 +46,9 @@
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.data.std.util.ByteArrayAccessibleInputStream;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
 
 public class STMBREnlargeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
 
@@ -61,6 +61,12 @@
         }
     };
 
+    private final GeometryFactory geometryFactory;
+
+    public STMBREnlargeDescriptor() {
+        geometryFactory = new GeometryFactory();
+    }
+
     @Override
     public FunctionIdentifier getIdentifier() {
         return BuiltinFunctions.ST_MBR_ENLARGE;
@@ -114,14 +120,13 @@
                         }
 
                         inStream.setContent(data0, offset0 + 1, len - 1);
-                        OGCGeometry geometry =
-                                AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
-                        geometry.getEsriGeometry().queryEnvelope(env);
+                        Geometry geometry = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
+                        Envelope env = geometry.getEnvelopeInternal();
                         double expandValue =
                                 ATypeHierarchy.getDoubleValue(getIdentifier().getName(), 0, data1, offset1);
                         AMutableRectangle expandedMBR = new AMutableRectangle(
-                                new AMutablePoint(env.getXMin() - expandValue, env.getYMin() - expandValue),
-                                new AMutablePoint(env.getXMax() + expandValue, env.getYMax() + expandValue));
+                                new AMutablePoint(env.getMinX() - expandValue, env.getMinY() - expandValue),
+                                new AMutablePoint(env.getMaxX() + expandValue, env.getMaxY() + expandValue));
                         rectangleSerde.serialize(expandedMBR, out);
                         result.set(resultStorage);
                     }
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMDescriptor.java
index e208771..71fcacf 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMDescriptor.java
@@ -20,24 +20,36 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.CoordinateXYZM;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Point;
 
 public class STMDescriptor extends AbstractSTSingleGeometryDescriptor {
 
     private static final long serialVersionUID = 1L;
     public static final IFunctionDescriptorFactory FACTORY = STMDescriptor::new;
+    private static final Logger LOGGER = LogManager.getLogger();
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCPoint) {
-            return ((OGCPoint) geometry).M();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POINT)) {
+            Point point = (Point) geometry;
+            Coordinate coordinate = point.getCoordinate();
+            if (coordinate instanceof CoordinateXYZM) {
+                return coordinate.getM();
+            } else {
+                LOGGER.debug("The provided point does not have an M value.");
+                return Double.NaN;
+            }
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 
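In JTS, a plain Coordinate already reports NaN for getM() (only CoordinateXYZM
carries a real M ordinate), so the instanceof check above mainly exists to
emit the debug log before returning Double.NaN. Sketch (class name invented):

    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.CoordinateXYZM;

    class MValueSketch {
        public static void main(String[] args) {
            // Base Coordinate has no M storage and returns NaN.
            System.out.println(new Coordinate(1, 2).getM());           // NaN
            System.out.println(new CoordinateXYZM(1, 2, 3, 4).getM()); // 4.0
        }
    }
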
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakeEnvelopeDescriptorSRID.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakeEnvelopeDescriptorSRID.java
index 3aa9e7b..f8cc270 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakeEnvelopeDescriptorSRID.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakeEnvelopeDescriptorSRID.java
@@ -20,8 +20,8 @@
 
 import java.io.DataOutput;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 
+import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.asterix.om.types.ATypeTag;
@@ -35,10 +35,9 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.Polygon;
 
 public class STMakeEnvelopeDescriptorSRID extends AbstractGetValDescriptor {
 
@@ -70,14 +69,19 @@
         private DataOutput out;
         private IPointable inputArg0;
         private IScalarEvaluator eval0;
+        private Coordinate coordinate0;
         private IPointable inputArg1;
         private IScalarEvaluator eval1;
+        private Coordinate coordinate1;
         private IPointable inputArg2;
         private IScalarEvaluator eval2;
+        private Coordinate coordinate2;
         private IPointable inputArg3;
         private IScalarEvaluator eval3;
+        private Coordinate coordinate3;
         private IPointable inputArg4;
         private IScalarEvaluator eval4;
+        private final GeometryFactory geometryFactory;
 
         public STMakeEnvelopeEvaluator(IScalarEvaluatorFactory[] args, IEvaluatorContext ctx)
                 throws HyracksDataException {
@@ -93,6 +97,11 @@
             eval3 = args[3].createScalarEvaluator(ctx);
             inputArg4 = new VoidPointable();
             eval4 = args[4].createScalarEvaluator(ctx);
+            geometryFactory = new GeometryFactory();
+            coordinate0 = new Coordinate();
+            coordinate1 = new Coordinate();
+            coordinate2 = new Coordinate();
+            coordinate3 = new Coordinate();
         }
 
         @Override
@@ -116,23 +125,30 @@
             byte[] data3 = inputArg3.getByteArray();
             int offset3 = inputArg3.getStartOffset();
 
+            // Spatial Reference System Identifier (SRID), currently not used.
             eval4.evaluate(tuple, inputArg4);
             byte[] data4 = inputArg4.getByteArray();
             int offset4 = inputArg4.getStartOffset();
 
             try {
+                double xmin = getVal(data0, offset0);
+                double ymin = getVal(data1, offset1);
+                double xmax = getVal(data2, offset2);
+                double ymax = getVal(data3, offset3);
+                coordinate0.setX(xmin);
+                coordinate0.setY(ymin);
+                coordinate1.setX(xmin);
+                coordinate1.setY(ymax);
+                coordinate2.setX(xmax);
+                coordinate2.setY(ymax);
+                coordinate3.setX(xmax);
+                coordinate3.setY(ymin);
 
-                OGCGeometry ogcGeometry =
-                        OGCGeometry
-                                .createFromEsriGeometry(
-                                        new Envelope(getVal(data0, offset0), getVal(data1, offset1),
-                                                getVal(data2, offset2), getVal(data3, offset3)),
-                                        SpatialReference.create((int) getVal(data4, offset4)));
-                ByteBuffer buffer = ogcGeometry.asBinary();
-                byte[] bytes = buffer.array();
+                Coordinate[] coords =
+                        new Coordinate[] { coordinate0, coordinate1, coordinate2, coordinate3, coordinate0 };
+                Polygon polygon = geometryFactory.createPolygon(coords);
                 out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
-                out.writeInt(bytes.length);
-                out.write(bytes);
+                AGeometrySerializerDeserializer.INSTANCE.serialize(polygon, out);
                 result.set(resultStorage);
             } catch (IOException e) {
                 throw new InvalidDataFormatException(sourceLoc, getIdentifier(), e,
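
The envelope is now built as an explicit five-coordinate ring:
GeometryFactory.createPolygon requires a closed shell (first coordinate equal
to the last), which is why coordinate0 appears at both ends of the array
above. Sketch (class name invented):

    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.GeometryFactory;
    import org.locationtech.jts.geom.Polygon;

    class EnvelopePolygonSketch {
        public static void main(String[] args) {
            GeometryFactory gf = new GeometryFactory();
            // The ring repeats (0, 0) as its last element to close the shell;
            // an unclosed ring makes createPolygon throw.
            Coordinate[] ring = { new Coordinate(0, 0), new Coordinate(0, 5),
                    new Coordinate(5, 5), new Coordinate(5, 0), new Coordinate(0, 0) };
            Polygon envelope = gf.createPolygon(ring);
            System.out.println(envelope.getArea()); // 25.0
        }
    }
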
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DDescriptor.java
index 4a5cf7e..8009510 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DDescriptor.java
@@ -22,7 +22,6 @@
 import java.io.IOException;
 
 import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
-import org.apache.asterix.om.base.AGeometry;
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.asterix.om.types.ATypeTag;
@@ -35,10 +34,9 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.Point;
 
 public class STMakePoint3DDescriptor extends AbstractGetValDescriptor {
 
@@ -73,7 +71,7 @@
         private final IScalarEvaluator eval1;
         private final IScalarEvaluator eval2;
         private Point point;
-        private AGeometry pointGeometry;
+        private final GeometryFactory geometryFactory;
 
         public STMakePoint3DEvaluator(IScalarEvaluatorFactory[] args, IEvaluatorContext ctx)
                 throws HyracksDataException {
@@ -85,8 +83,7 @@
             eval0 = args[0].createScalarEvaluator(ctx);
             eval1 = args[1].createScalarEvaluator(ctx);
             eval2 = args[2].createScalarEvaluator(ctx);
-            point = new Point(0, 0, 0);
-            pointGeometry = new AGeometry(new OGCPoint(point, SpatialReference.create(4326)));
+            geometryFactory = new GeometryFactory();
         }
 
         @Override
@@ -105,10 +102,10 @@
             resultStorage.reset();
             try {
                 out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
-                point.setX(getVal(bytes0, offset0));
-                point.setY(getVal(bytes1, offset1));
-                point.setZ(getVal(bytes2, offset2));
-                AGeometrySerializerDeserializer.INSTANCE.serialize(pointGeometry, out);
+                Coordinate coordinate =
+                        new Coordinate(getVal(bytes0, offset0), getVal(bytes1, offset1), getVal(bytes2, offset2));
+                point = geometryFactory.createPoint(coordinate);
+                AGeometrySerializerDeserializer.INSTANCE.serialize(point, out);
             } catch (IOException e1) {
                 throw HyracksDataException.create(e1);
             }
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DWithMDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DWithMDescriptor.java
index 55baa37..edc0cdc 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DWithMDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DWithMDescriptor.java
@@ -22,7 +22,6 @@
 import java.io.IOException;
 
 import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
-import org.apache.asterix.om.base.AGeometry;
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.asterix.om.types.ATypeTag;
@@ -35,11 +34,15 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+import org.locationtech.jts.geom.CoordinateXYZM;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.Point;
 
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCPoint;
-
+/**
+ * TODO: Support writing geometry with 4 dimensions
+ * JTS WKBWriter currently does not support writing 4 dimensions and drops the M ordinate.
+ * See https://github.com/locationtech/jts/issues/733 for more details.
+ */
 public class STMakePoint3DWithMDescriptor extends AbstractGetValDescriptor {
 
     private static final long serialVersionUID = 1L;
@@ -75,7 +78,7 @@
         private final IScalarEvaluator eval2;
         private final IScalarEvaluator eval3;
         private Point point;
-        private AGeometry pointGeometry;
+        private final GeometryFactory geometryFactory;
 
         public STMakePoint3DWithMEvaluator(IScalarEvaluatorFactory[] args, IEvaluatorContext ctx)
                 throws HyracksDataException {
@@ -89,8 +92,7 @@
             eval1 = args[1].createScalarEvaluator(ctx);
             eval2 = args[2].createScalarEvaluator(ctx);
             eval3 = args[3].createScalarEvaluator(ctx);
-            point = new Point(0, 0, 0);
-            pointGeometry = new AGeometry(new OGCPoint(point, SpatialReference.create(4326)));
+            geometryFactory = new GeometryFactory();
         }
 
         @Override
@@ -112,11 +114,10 @@
             resultStorage.reset();
             try {
                 out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
-                point.setX(getVal(bytes0, offset0));
-                point.setY(getVal(bytes1, offset1));
-                point.setZ(getVal(bytes2, offset2));
-                point.setM(getVal(bytes3, offset3));
-                AGeometrySerializerDeserializer.INSTANCE.serialize(pointGeometry, out);
+                CoordinateXYZM coordinate = new CoordinateXYZM(getVal(bytes0, offset0), getVal(bytes1, offset1),
+                        getVal(bytes2, offset2), getVal(bytes3, offset3));
+                point = geometryFactory.createPoint(coordinate);
+                AGeometrySerializerDeserializer.INSTANCE.serialize(point, out);
             } catch (IOException e1) {
                 throw HyracksDataException.create(e1);
             }
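
CoordinateXYZM holds the M ordinate in memory, but per the class comment
above the WKB round-trip can still drop it (JTS issue #733). Sketch (class
name invented):

    import org.locationtech.jts.geom.CoordinateXYZM;
    import org.locationtech.jts.geom.GeometryFactory;
    import org.locationtech.jts.geom.Point;

    class PointXyzmSketch {
        public static void main(String[] args) {
            GeometryFactory gf = new GeometryFactory();
            Point p = gf.createPoint(new CoordinateXYZM(1.0, 2.0, 3.0, 4.0));
            // 4.0 while in memory; serialization through WKB may lose it.
            System.out.println(p.getCoordinate().getM());
        }
    }
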
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePointDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePointDescriptor.java
index c3b7b36..f21b381 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePointDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePointDescriptor.java
@@ -22,7 +22,6 @@
 import java.io.IOException;
 
 import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
-import org.apache.asterix.om.base.AGeometry;
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.asterix.om.types.ATypeTag;
@@ -35,10 +34,9 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.Point;
 
 public class STMakePointDescriptor extends AbstractGetValDescriptor {
 
@@ -71,7 +69,7 @@
         private final IScalarEvaluator eval0;
         private final IScalarEvaluator eval1;
         private Point point;
-        private AGeometry pointGeometry;
+        private final GeometryFactory geometryFactory;
 
         public STMakePointEvaluator(IScalarEvaluatorFactory[] args, IEvaluatorContext ctx) throws HyracksDataException {
             resultStorage = new ArrayBackedValueStorage();
@@ -80,8 +78,7 @@
             inputArg1 = new VoidPointable();
             eval0 = args[0].createScalarEvaluator(ctx);
             eval1 = args[1].createScalarEvaluator(ctx);
-            point = new Point(0, 0);
-            pointGeometry = new AGeometry(new OGCPoint(point, SpatialReference.create(4326)));
+            geometryFactory = new GeometryFactory();
         }
 
         @Override
@@ -97,9 +94,9 @@
             resultStorage.reset();
             try {
                 out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
-                point.setX(getVal(bytes0, offset0));
-                point.setY(getVal(bytes1, offset1));
-                AGeometrySerializerDeserializer.INSTANCE.serialize(pointGeometry, out);
+                Coordinate coordinate = new Coordinate(getVal(bytes0, offset0), getVal(bytes1, offset1));
+                point = geometryFactory.createPoint(coordinate);
+                AGeometrySerializerDeserializer.INSTANCE.serialize(point, out);
             } catch (IOException e1) {
                 throw HyracksDataException.create(e1);
             }
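
Editor's note: one behavioral nuance worth flagging. The removed code pinned SRID 4326 via SpatialReference.create(4326), while new GeometryFactory() defaults to SRID 0; whether the serializer reattaches an SRID is not visible in this hunk. A hedged sketch of the two factory configurations (names illustrative):

    import org.locationtech.jts.geom.GeometryFactory;
    import org.locationtech.jts.geom.PrecisionModel;

    public class FactorySridSketch {
        public static void main(String[] args) {
            GeometryFactory dflt = new GeometryFactory();
            System.out.println(dflt.getSRID());   // 0: the default
            GeometryFactory wgs84 = new GeometryFactory(new PrecisionModel(), 4326);
            System.out.println(wgs84.getSRID());  // 4326; geometries it creates inherit this SRID
        }
    }
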
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNPointsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNPointsDescriptor.java
index 309d325..c80c513 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNPointsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNPointsDescriptor.java
@@ -20,14 +20,13 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.GeometryCursor;
-import com.esri.core.geometry.MultiVertexGeometry;
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryCollection;
+import org.locationtech.jts.geom.MultiPolygon;
+import org.locationtech.jts.geom.Polygon;
 
 public class STNPointsDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -35,31 +34,45 @@
     public static final IFunctionDescriptorFactory FACTORY = STNPointsDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        Geometry esriGeometry = geometry.getEsriGeometry();
-        if (esriGeometry instanceof MultiVertexGeometry) {
-            return ((MultiVertexGeometry) esriGeometry).getPointCount();
-        } else if (esriGeometry instanceof Point) {
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (geometry == null) {
+            return 0;
+        }
+        if (geometry.isEmpty()) {
+            return 0;
+        }
+
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POINT)) {
             return 1;
-        } else if (esriGeometry == null) {
-            int count = 0;
-            GeometryCursor geometryCursor = geometry.getEsriGeometryCursor();
-            esriGeometry = geometryCursor.next();
-            while (esriGeometry != null) {
-                if (esriGeometry instanceof MultiVertexGeometry) {
-                    count += ((MultiVertexGeometry) esriGeometry).getPointCount();
-                } else if (esriGeometry instanceof Point) {
-                    count += 1;
-                }
-                esriGeometry = geometryCursor.next();
+        }
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POLYGON)) {
+            Polygon polygon = (Polygon) geometry;
+            int count = polygon.getExteriorRing().getCoordinates().length - 1;
+            for (int i = 0; i < polygon.getNumInteriorRing(); i++) {
+                count += polygon.getInteriorRingN(i).getCoordinates().length - 1;
             }
             return count;
-        } else if (geometry.isEmpty()) {
-            return 0;
-        } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
         }
+
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_MULTIPOLYGON)) {
+            int count = 0;
+            MultiPolygon multiPolygon = (MultiPolygon) geometry;
+            for (int i = 0; i < multiPolygon.getNumGeometries(); i++) {
+                count += (int) evaluateOGCGeometry(multiPolygon.getGeometryN(i));
+            }
+            return count;
+        }
+
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_GEOMETRYCOLLECTION)) {
+            int count = 0;
+            GeometryCollection collection = (GeometryCollection) geometry;
+            for (int i = 0; i < collection.getNumGeometries(); i++) {
+                count += (int) evaluateOGCGeometry(collection.getGeometryN(i));
+            }
+            return count;
+        }
+
+        return geometry.getCoordinates().length;
     }
 
     @Override
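
Editor's note: the - 1 in the ring counts above exists because JTS stores rings closed, so the last coordinate repeats the first. A small sketch (illustrative only; the WKT input is invented):

    import org.locationtech.jts.geom.Polygon;
    import org.locationtech.jts.io.WKTReader;

    public class RingVertexSketch {
        public static void main(String[] args) throws Exception {
            Polygon poly = (Polygon) new WKTReader().read("POLYGON ((0 0, 10 0, 10 10, 0 0))");
            // 4 stored coordinates, but only 3 distinct vertices: the ring is closed
            System.out.println(poly.getExteriorRing().getCoordinates().length);     // 4
            System.out.println(poly.getExteriorRing().getCoordinates().length - 1); // 3, what the descriptor counts per ring
        }
    }
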
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNRingsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNRingsDescriptor.java
index f8e313d..ee21878 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNRingsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNRingsDescriptor.java
@@ -20,12 +20,12 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCMultiPolygon;
-import com.esri.core.geometry.ogc.OGCPolygon;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.MultiPolygon;
+import org.locationtech.jts.geom.Polygon;
 
 public class STNRingsDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -33,22 +33,22 @@
     public static final IFunctionDescriptorFactory FACTORY = STNRingsDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCPolygon) {
-            return ((OGCPolygon) geometry).numInteriorRing() + 1;
-        } else if (geometry instanceof OGCMultiPolygon) {
-            OGCMultiPolygon polygon = (OGCMultiPolygon) geometry;
-            int numGeometries = polygon.numGeometries();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POLYGON)) {
+            return ((Polygon) geometry).getNumInteriorRing() + 1;
+        } else if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_MULTIPOLYGON)) {
+            MultiPolygon polygon = (MultiPolygon) geometry;
+            int numGeometries = polygon.getNumGeometries();
             int count = 0;
+            for (int i = 0; i < numGeometries; i++) {
-                if (polygon.geometryN(i) instanceof OGCPolygon) {
-                    count += ((OGCPolygon) polygon.geometryN(i)).numInteriorRing() + 1;
+                if (StringUtils.equals(polygon.getGeometryN(i).getGeometryType(), Geometry.TYPENAME_POLYGON)) {
+                    count += ((Polygon) polygon.getGeometryN(i)).getNumInteriorRing() + 1;
                 }
             }
             return count;
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 
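
Editor's note: ring arithmetic in JTS, for reference against the (index-corrected) loop above; getGeometryN is 0-based, and every element of a MultiPolygon is a Polygon, so the inner type check is purely defensive. Illustrative only:

    import org.locationtech.jts.geom.Polygon;
    import org.locationtech.jts.io.WKTReader;

    public class RingCountSketch {
        public static void main(String[] args) throws Exception {
            Polygon donut = (Polygon) new WKTReader()
                    .read("POLYGON ((0 0, 20 0, 20 20, 0 20, 0 0), (5 5, 15 5, 15 15, 5 15, 5 5))");
            // one exterior ring + one hole = 2 rings
            System.out.println(donut.getNumInteriorRing() + 1); // 2
        }
    }
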
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumGeometriesDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumGeometriesDescriptor.java
index e87d1f4..938f591 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumGeometriesDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumGeometriesDescriptor.java
@@ -20,11 +20,10 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCGeometryCollection;
+import org.locationtech.jts.geom.Geometry;
 
 public class STNumGeometriesDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -32,9 +31,9 @@
     public static final IFunctionDescriptorFactory FACTORY = STNumGeometriesDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCGeometryCollection) {
-            return ((OGCGeometryCollection) geometry).numGeometries();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_GEOMETRYCOLLECTION)) {
+            return geometry.getNumGeometries();
         } else if (!geometry.isEmpty()) {
             return 1;
         } else {
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumInteriorRingsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumInteriorRingsDescriptor.java
index 19f2e11..c4f97f3 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumInteriorRingsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumInteriorRingsDescriptor.java
@@ -20,11 +20,11 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPolygon;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Polygon;
 
 public class STNumInteriorRingsDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -32,12 +32,12 @@
     public static final IFunctionDescriptorFactory FACTORY = STNumInteriorRingsDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCPolygon) {
-            return ((OGCPolygon) geometry).numInteriorRing();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POLYGON)) {
+            return ((Polygon) geometry).getNumInteriorRing();
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STOverlapsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STOverlapsDescriptor.java
index 61286b0..509566a 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STOverlapsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STOverlapsDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STOverlapsDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STOverlapsDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.overlaps(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPointNDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPointNDescriptor.java
index e0495f5..1689f9d 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPointNDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPointNDescriptor.java
@@ -20,11 +20,11 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCLineString;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.LineString;
 
 public class STPointNDescriptor extends AbstractSTGeometryNDescriptor {
 
@@ -38,12 +38,12 @@
     }
 
     @Override
-    protected OGCGeometry evaluateOGCGeometry(OGCGeometry geometry, int n) throws HyracksDataException {
-        if (geometry instanceof OGCLineString) {
-            return ((OGCLineString) geometry).pointN(n);
+    protected Geometry evaluateOGCGeometry(Geometry geometry, int n) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_LINESTRING)) {
+            return ((LineString) geometry).getPointN(n);
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 }
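
Editor's note: JTS's LineString.getPointN is 0-based, while the SQL-level ST_PointN convention is typically 1-based; whichever offset adjustment applies lives in the dispatch code not shown here. A minimal sketch (inputs invented):

    import org.locationtech.jts.geom.LineString;
    import org.locationtech.jts.io.WKTReader;

    public class PointNSketch {
        public static void main(String[] args) throws Exception {
            LineString line = (LineString) new WKTReader().read("LINESTRING (0 0, 1 1, 2 2)");
            System.out.println(line.getPointN(0));   // POINT (0 0), 0-based indexing
            System.out.println(line.getNumPoints()); // 3
        }
    }
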
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPolygonizeDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPolygonizeDescriptor.java
index e2fcd8a..3683e1b 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPolygonizeDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPolygonizeDescriptor.java
@@ -49,11 +49,9 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCConcreteGeometryCollection;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCGeometryCollection;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryCollection;
+import org.locationtech.jts.geom.GeometryFactory;
 
 public class STPolygonizeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
 
@@ -61,6 +59,12 @@
 
     private static final long serialVersionUID = 1L;
 
+    private final GeometryFactory geometryFactory;
+
+    public STPolygonizeDescriptor() {
+        geometryFactory = new GeometryFactory();
+    }
+
     @Override
     public FunctionIdentifier getIdentifier() {
         return BuiltinFunctions.ST_POLYGONIZE;
@@ -116,13 +120,13 @@
             ByteArrayInputStream inStream = new ByteArrayInputStream(bytes, offset + 1, len - 1);
             DataInputStream dataIn = new DataInputStream(inStream);
             IACursor cursor = ((IACollection) serde.deserialize(dataIn)).getCursor();
-            List<OGCGeometry> list = new ArrayList<>();
+            List<Geometry> list = new ArrayList<>();
             while (cursor.next()) {
                 IAObject object = cursor.get();
                 list.add(((AGeometry) object).getGeometry());
             }
-            OGCGeometryCollection geometryCollection =
-                    new OGCConcreteGeometryCollection(list, SpatialReference.create(4326));
+            GeometryCollection geometryCollection =
+                    geometryFactory.createGeometryCollection(list.toArray(new Geometry[0]));
             try {
                 SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AGEOMETRY)
                         .serialize(new AGeometry(geometryCollection), out);
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STRelateDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STRelateDescriptor.java
index c3dc132..bc9c0b6 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STRelateDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STRelateDescriptor.java
@@ -44,8 +44,7 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STRelateDescriptor extends AbstractScalarFunctionDynamicDescriptor {
 
@@ -128,9 +127,9 @@
             DataInputStream dataIn = new DataInputStream(inStream);
             String matrix = AStringSerializerDeserializer.INSTANCE.deserialize(dataIn).getStringValue();
             DataInputStream dataIn0 = new DataInputStream(new ByteArrayInputStream(bytes0, offset0 + 1, len0 - 1));
-            OGCGeometry geometry0 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn0).getGeometry();
+            Geometry geometry0 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn0).getGeometry();
             DataInputStream dataIn1 = new DataInputStream(new ByteArrayInputStream(bytes1, offset1 + 1, len1 - 1));
-            OGCGeometry geometry1 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn1).getGeometry();
+            Geometry geometry1 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn1).getGeometry();
             try {
                 boolean val = geometry0.relate(geometry1, matrix);
                 SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN)
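
Editor's note: Geometry.relate(Geometry, String) evaluates a DE-9IM intersection-matrix pattern, matching the removed ESRI call one-for-one. A sketch, illustrative only; the pattern below is the standard DE-9IM encoding of "within":

    import org.locationtech.jts.geom.Geometry;
    import org.locationtech.jts.io.WKTReader;

    public class RelateSketch {
        public static void main(String[] args) throws Exception {
            WKTReader reader = new WKTReader();
            Geometry inner = reader.read("POINT (5 5)");
            Geometry outer = reader.read("POLYGON ((0 0, 10 0, 10 10, 0 10, 0 0))");
            System.out.println(inner.relate(outer, "T*F**F***")); // true: inner is within outer
        }
    }
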
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSRIDDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSRIDDescriptor.java
index e50678a..e19f21a 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSRIDDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSRIDDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STSRIDDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,8 +30,8 @@
     public static final IFunctionDescriptorFactory FACTORY = STSRIDDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        return geometry.SRID();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        return geometry.getSRID();
     }
 
     @Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STStartPointDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STStartPointDescriptor.java
index 127b0d5..71d3470 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STStartPointDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STStartPointDescriptor.java
@@ -20,11 +20,11 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCCurve;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.LineString;
 
 public class STStartPointDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -32,12 +32,12 @@
     public static final IFunctionDescriptorFactory FACTORY = STStartPointDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCCurve) {
-            return ((OGCCurve) geometry).startPoint();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_LINESTRING)) {
+            return ((LineString) geometry).getStartPoint();
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSymDifferenceDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSymDifferenceDescriptor.java
index e4d0507..b04254e 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSymDifferenceDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSymDifferenceDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STSymDifferenceDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STSymDifferenceDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.symDifference(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STTouchesDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STTouchesDescriptor.java
index ddf6550..0615f61 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STTouchesDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STTouchesDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STTouchesDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STTouchesDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.touches(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STUnionDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STUnionDescriptor.java
index 1cfea10..533a3c1 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STUnionDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STUnionDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STUnionDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STUnionDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.union(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STWithinDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STWithinDescriptor.java
index c78e48f..a3687ac 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STWithinDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STWithinDescriptor.java
@@ -22,8 +22,7 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class STWithinDescriptor extends AbstractSTDoubleGeometryDescriptor {
 
@@ -31,7 +30,7 @@
     public static final IFunctionDescriptorFactory FACTORY = STWithinDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+    protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
         return geometry0.within(geometry1);
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXDescriptor.java
index 608e55cc..2bb450f 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXDescriptor.java
@@ -20,11 +20,11 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Point;
 
 public class STXDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -32,12 +32,12 @@
     public static final IFunctionDescriptorFactory FACTORY = STXDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCPoint) {
-            return ((OGCPoint) geometry).X();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POINT)) {
+            return ((Point) geometry).getX();
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMaxDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMaxDescriptor.java
index 2431f3f..1ad10ba 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMaxDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMaxDescriptor.java
@@ -22,10 +22,8 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
 
 public class STXMaxDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -33,15 +31,13 @@
     public static final IFunctionDescriptorFactory FACTORY = STXMaxDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        Geometry esriGeom = geometry.getEsriGeometry();
-        if (esriGeom != null) {
-            Envelope env = new Envelope();
-            esriGeom.queryEnvelope(env);
-            return env.getXMax();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        Envelope env = geometry.getEnvelopeInternal();
+        if (env != null) {
+            return env.getMaxX();
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
 
     }
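
Editor's note: getEnvelopeInternal() does not return null in current JTS releases (an empty geometry yields an empty Envelope), so the else branch here, and in the XMin/YMax/YMin siblings below, is defensive. A sketch of the envelope accessors, with an invented input:

    import org.locationtech.jts.geom.Envelope;
    import org.locationtech.jts.geom.Geometry;
    import org.locationtech.jts.io.WKTReader;

    public class EnvelopeSketch {
        public static void main(String[] args) throws Exception {
            Geometry g = new WKTReader().read("LINESTRING (1 2, 7 4)");
            Envelope env = g.getEnvelopeInternal(); // cached on the geometry
            System.out.printf("x: [%f, %f]  y: [%f, %f]%n",
                    env.getMinX(), env.getMaxX(), env.getMinY(), env.getMaxY());
        }
    }
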
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMinDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMinDescriptor.java
index 66db0c6..ad47b30 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMinDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMinDescriptor.java
@@ -22,10 +22,8 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
 
 public class STXMinDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -33,17 +31,14 @@
     public static final IFunctionDescriptorFactory FACTORY = STXMinDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        Geometry esriGeom = geometry.getEsriGeometry();
-        if (esriGeom != null) {
-            Envelope env = new Envelope();
-            esriGeom.queryEnvelope(env);
-            return env.getXMin();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        Envelope env = geometry.getEnvelopeInternal();
+        if (env != null) {
+            return env.getMinX();
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
-
     }
 
     @Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYDescriptor.java
index 32baa0a..df5a57d 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYDescriptor.java
@@ -20,11 +20,11 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Point;
 
 public class STYDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -32,12 +32,12 @@
     public static final IFunctionDescriptorFactory FACTORY = STYDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCPoint) {
-            return ((OGCPoint) geometry).Y();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POINT)) {
+            return ((Point) geometry).getY();
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMaxDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMaxDescriptor.java
index 7b82915..b5aee1f 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMaxDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMaxDescriptor.java
@@ -22,10 +22,8 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
 
 public class STYMaxDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -33,15 +31,13 @@
     public static final IFunctionDescriptorFactory FACTORY = STYMaxDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        Geometry esriGeom = geometry.getEsriGeometry();
-        if (esriGeom != null) {
-            Envelope env = new Envelope();
-            esriGeom.queryEnvelope(env);
-            return env.getYMax();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        Envelope env = geometry.getEnvelopeInternal();
+        if (env != null) {
+            return env.getMaxY();
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
 
     }
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMinDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMinDescriptor.java
index 1cc5e88..921761f 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMinDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMinDescriptor.java
@@ -22,10 +22,8 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
 
 public class STYMinDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -33,15 +31,13 @@
     public static final IFunctionDescriptorFactory FACTORY = STYMinDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        Geometry esriGeom = geometry.getEsriGeometry();
-        if (esriGeom != null) {
-            Envelope env = new Envelope();
-            esriGeom.queryEnvelope(env);
-            return env.getYMin();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        Envelope env = geometry.getEnvelopeInternal();
+        if (env != null) {
+            return env.getMinY();
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
 
     }
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZDescriptor.java
index 739775a..eae150c 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZDescriptor.java
@@ -20,24 +20,32 @@
 
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Point;
 
 public class STZDescriptor extends AbstractSTSingleGeometryDescriptor {
 
     private static final long serialVersionUID = 1L;
     public static final IFunctionDescriptorFactory FACTORY = STZDescriptor::new;
+    private static final Logger LOGGER = LogManager.getLogger();
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        if (geometry instanceof OGCPoint) {
-            return ((OGCPoint) geometry).Z();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POINT)) {
+            Point point = (Point) geometry;
+            double z = point.getCoordinate().getZ();
+            if (Double.isNaN(z)) {
+                LOGGER.debug("No Z-coordinate available for the given point.");
+            }
+            return z;
         } else {
-            throw new UnsupportedOperationException(
-                    "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+            throw new UnsupportedOperationException("The operation " + getIdentifier()
+                    + " is not supported for the type " + geometry.getGeometryType());
         }
     }
 
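
Editor's note: in JTS, a 2D point simply carries Double.NaN for Z, which is why the code above logs rather than throws. A sketch (names invented):

    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.GeometryFactory;
    import org.locationtech.jts.geom.Point;

    public class ZSketch {
        public static void main(String[] args) {
            GeometryFactory factory = new GeometryFactory();
            Point p2d = factory.createPoint(new Coordinate(1, 2));
            Point p3d = factory.createPoint(new Coordinate(1, 2, 3));
            System.out.println(Double.isNaN(p2d.getCoordinate().getZ())); // true: no Z present
            System.out.println(p3d.getCoordinate().getZ());               // 3.0
        }
    }
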
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMaxDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMaxDescriptor.java
index cb3f54e..fa80241 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMaxDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMaxDescriptor.java
@@ -22,8 +22,8 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
 
 public class STZMaxDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,8 +31,18 @@
     public static final IFunctionDescriptorFactory FACTORY = STZMaxDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        return geometry.MaxZ();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        double maxZ = Double.NEGATIVE_INFINITY;
+        boolean foundZ = false;
+        for (Coordinate coord : geometry.getCoordinates()) {
+            if (!Double.isNaN(coord.getZ())) {
+                foundZ = true;
+                if (coord.getZ() > maxZ) {
+                    maxZ = coord.getZ();
+                }
+            }
+        }
+        return foundZ ? maxZ : Double.NaN;
     }
 
     @Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMinDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMinDescriptor.java
index 3f6f6b3..ba637d4 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMinDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMinDescriptor.java
@@ -22,8 +22,8 @@
 import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
 
 public class STZMinDescriptor extends AbstractSTSingleGeometryDescriptor {
 
@@ -31,8 +31,19 @@
     public static final IFunctionDescriptorFactory FACTORY = STZMinDescriptor::new;
 
     @Override
-    protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-        return geometry.MinZ();
+    protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+        double minZ = Double.POSITIVE_INFINITY;
+        boolean foundZ = false;
+        for (Coordinate coord : geometry.getCoordinates()) {
+            if (!Double.isNaN(coord.getZ())) {
+                foundZ = true;
+                if (coord.getZ() < minZ) {
+                    minZ = coord.getZ();
+                }
+            }
+        }
+
+        return foundZ ? minZ : Double.NaN;
     }
 
     @Override
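
Editor's note: usage of the new NaN-aware min/max scans in the two descriptors above; geometries mixing 2D and 3D coordinates are tolerated because coordinates without a Z are skipped. Illustrative only, with an invented WKT input:

    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.Geometry;
    import org.locationtech.jts.io.WKTReader;

    public class ZRangeSketch {
        public static void main(String[] args) throws Exception {
            Geometry g = new WKTReader().read("LINESTRING (0 0 5, 1 1 9, 2 2 1)");
            double min = Double.POSITIVE_INFINITY, max = Double.NEGATIVE_INFINITY;
            for (Coordinate c : g.getCoordinates()) {
                if (!Double.isNaN(c.getZ())) { // skip coordinates with no Z
                    min = Math.min(min, c.getZ());
                    max = Math.max(max, c.getZ());
                }
            }
            System.out.println(min + " " + max); // 1.0 9.0
        }
    }
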
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CopyToStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CopyToStatement.java
index 599d528..5c89a9f 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CopyToStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CopyToStatement.java
@@ -32,6 +32,8 @@
 import org.apache.asterix.lang.common.base.IReturningStatement;
 import org.apache.asterix.lang.common.clause.OrderbyClause;
 import org.apache.asterix.lang.common.expression.LiteralExpr;
+import org.apache.asterix.lang.common.expression.RecordTypeDefinition;
+import org.apache.asterix.lang.common.expression.TypeExpression;
 import org.apache.asterix.lang.common.expression.VariableExpr;
 import org.apache.asterix.lang.common.literal.StringLiteral;
 import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
@@ -52,13 +54,15 @@
     private List<Expression> partitionExpressions;
     private List<Expression> orderByList;
     private int varCounter;
+    private RecordTypeDefinition itemType;
+    private TypeExpression typeExpressionItemType;
 
     public CopyToStatement(Namespace namespace, String datasetName, Query query, VariableExpr sourceVariable,
             ExternalDetailsDecl externalDetailsDecl, int varCounter, List<Expression> keyExpressions,
             boolean autogenerated) {
         this(namespace, datasetName, query, sourceVariable, externalDetailsDecl, new ArrayList<>(), new ArrayList<>(),
                 new HashMap<>(), new ArrayList<>(), new ArrayList<>(), new ArrayList<>(), varCounter, keyExpressions,
-                autogenerated);
+                autogenerated, null, null);
     }
 
     public CopyToStatement(Namespace namespace, String datasetName, Query query, VariableExpr sourceVariable,
@@ -68,7 +72,18 @@
             List<OrderbyClause.NullOrderModifier> orderByNullModifierList, int varCounter) {
         this(namespace, datasetName, query, sourceVariable, externalDetailsDecl, pathExpressions, partitionExpressions,
                 partitionsVariables, orderbyList, orderByModifiers, orderByNullModifierList, varCounter,
-                new ArrayList<>(), false);
+                new ArrayList<>(), false, null, null);
+    }
+
+    public CopyToStatement(Namespace namespace, String datasetName, Query query, VariableExpr sourceVariable,
+            ExternalDetailsDecl externalDetailsDecl, List<Expression> pathExpressions,
+            List<Expression> partitionExpressions, Map<Integer, VariableExpr> partitionsVariables,
+            List<Expression> orderbyList, List<OrderbyClause.OrderModifier> orderByModifiers,
+            List<OrderbyClause.NullOrderModifier> orderByNullModifierList, int varCounter,
+            TypeExpression typeExpressionItemType, RecordTypeDefinition itemType) {
+        this(namespace, datasetName, query, sourceVariable, externalDetailsDecl, pathExpressions, partitionExpressions,
+                partitionsVariables, orderbyList, orderByModifiers, orderByNullModifierList, varCounter,
+                new ArrayList<>(), false, typeExpressionItemType, itemType);
     }
 
     private CopyToStatement(Namespace namespace, String datasetName, Query query, VariableExpr sourceVariable,
@@ -76,7 +91,8 @@
             List<Expression> partitionExpressions, Map<Integer, VariableExpr> partitionsVariables,
             List<Expression> orderbyList, List<OrderbyClause.OrderModifier> orderByModifiers,
             List<OrderbyClause.NullOrderModifier> orderByNullModifierList, int varCounter,
-            List<Expression> keyExpressions, boolean autogenerated) {
+            List<Expression> keyExpressions, boolean autogenerated, TypeExpression typeExpressionItemType,
+            RecordTypeDefinition itemType) {
         this.namespace = namespace;
         this.datasetName = datasetName;
         this.query = query;
@@ -91,6 +107,8 @@
         this.varCounter = varCounter;
         this.keyExpressions = keyExpressions != null ? keyExpressions : new ArrayList<>();
         this.autogenerated = autogenerated;
+        this.itemType = itemType;
+        this.typeExpressionItemType = typeExpressionItemType;
 
         if (pathExpressions.isEmpty()) {
             // Ensure path expressions to have at least an empty string
@@ -117,6 +135,10 @@
         this.namespace = namespace;
     }
 
+    public RecordTypeDefinition getType() {
+        return itemType;
+    }
+
     public Namespace getNamespace() {
         return namespace;
     }
@@ -192,6 +214,10 @@
         return !orderByList.isEmpty();
     }
 
+    public TypeExpression getTypeExpressionItemType() {
+        return typeExpressionItemType;
+    }
+
     @Override
     public int getVarCounter() {
         return varCounter;
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/ExternalDetailsDecl.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/ExternalDetailsDecl.java
index db599c4..e1c978a 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/ExternalDetailsDecl.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/ExternalDetailsDecl.java
@@ -20,9 +20,12 @@
 
 import java.util.Map;
 
+import org.apache.asterix.om.types.ARecordType;
+
 public class ExternalDetailsDecl implements IDatasetDetailsDecl {
     private Map<String, String> properties;
     private String adapter;
+    private ARecordType itemType;
 
     public void setAdapter(String adapter) {
         this.adapter = adapter;
@@ -32,6 +35,14 @@
         this.properties = properties;
     }
 
+    public void setItemType(ARecordType itemType) {
+        this.itemType = itemType;
+    }
+
+    public ARecordType getItemType() {
+        return itemType;
+    }
+
     public String getAdapter() {
         return adapter;
     }
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/ExpressionUtils.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/ExpressionUtils.java
index c9f8640..df7d6d8 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/ExpressionUtils.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/ExpressionUtils.java
@@ -263,12 +263,17 @@
                             Triple<DatasetFullyQualifiedName, Boolean, DatasetFullyQualifiedName> dsArgs =
                                     FunctionUtil.parseDatasetFunctionArguments(functionCall);
                             DatasetFullyQualifiedName datasetFullyQualifiedName = dsArgs.first;
-                            EntityDetails.EntityType entityType =
-                                    dsArgs.second ? EntityDetails.EntityType.VIEW : EntityDetails.EntityType.DATASET;
-                            metadataProvider
-                                    .addAccessedEntity(new EntityDetails(datasetFullyQualifiedName.getDatabaseName(),
-                                            datasetFullyQualifiedName.getDataverseName(),
-                                            datasetFullyQualifiedName.getDatasetName(), entityType));
+                            if (dsArgs.second) {
+                                metadataProvider.addAccessedEntity(
+                                        EntityDetails.newView(datasetFullyQualifiedName.getDatabaseName(),
+                                                datasetFullyQualifiedName.getDataverseName(),
+                                                datasetFullyQualifiedName.getDatasetName()));
+                            } else {
+                                metadataProvider.addAccessedEntity(
+                                        EntityDetails.newDataset(datasetFullyQualifiedName.getDatabaseName(),
+                                                datasetFullyQualifiedName.getDataverseName(),
+                                                datasetFullyQualifiedName.getDatasetName()));
+                            }
                             DatasetFullyQualifiedName synonymReference = dsArgs.third;
                             if (synonymReference != null) {
                                 // resolved via synonym -> store synonym name as a dependency
@@ -290,8 +295,8 @@
                     } else {
                         if (seenFunctions.add(signature)) {
                             String functionName = signature.getName() + "(" + signature.getArity() + ")";
-                            metadataProvider.addAccessedEntity(new EntityDetails(signature.getDatabaseName(),
-                                    signature.getDataverseName(), functionName, EntityDetails.EntityType.FUNCTION));
+                            metadataProvider.addAccessedEntity(EntityDetails.newFunction(signature.getDatabaseName(),
+                                    signature.getDataverseName(), functionName, signature.getArity()));
                             outFunctionDependencies.add(new DependencyFullyQualifiedName(signature.getDatabaseName(),
                                     signature.getDataverseName(), signature.getName(),
                                     Integer.toString(signature.getArity())));
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/OperatorExpressionVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/OperatorExpressionVisitor.java
index f371881..83ca05f 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/OperatorExpressionVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/OperatorExpressionVisitor.java
@@ -19,6 +19,9 @@
 
 package org.apache.asterix.lang.sqlpp.rewrites.visitor;
 
+import static org.apache.asterix.common.utils.ConstantUtil.PERCENT;
+import static org.apache.asterix.common.utils.ConstantUtil.UNDERSCORE;
+
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -34,10 +37,12 @@
 import org.apache.asterix.lang.common.expression.QuantifiedExpression.Quantifier;
 import org.apache.asterix.lang.common.expression.VariableExpr;
 import org.apache.asterix.lang.common.literal.FalseLiteral;
+import org.apache.asterix.lang.common.literal.StringLiteral;
 import org.apache.asterix.lang.common.literal.TrueLiteral;
 import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
 import org.apache.asterix.lang.common.struct.OperatorType;
 import org.apache.asterix.lang.common.struct.QuantifiedPair;
+import org.apache.asterix.lang.common.util.ExpressionUtils;
 import org.apache.asterix.lang.sqlpp.expression.CaseExpression;
 import org.apache.asterix.lang.sqlpp.util.FunctionMapUtil;
 import org.apache.asterix.lang.sqlpp.util.SqlppRewriteUtil;
@@ -82,14 +87,27 @@
         return operatorExpr;
     }
 
-    private Expression processLikeOperator(OperatorExpr operatorExpr, OperatorType opType) {
+    private Expression processLikeOperator(OperatorExpr operatorExpr, OperatorType opType) throws CompilationException {
         CallExpr likeExpr =
                 new CallExpr(new FunctionSignature(BuiltinFunctions.STRING_LIKE), operatorExpr.getExprList());
         likeExpr.addHints(operatorExpr.getHints());
         likeExpr.setSourceLocation(operatorExpr.getSourceLocation());
         switch (opType) {
             case LIKE:
-                return likeExpr;
+                Expression target = operatorExpr.getExprList().get(0);
+                Expression patternExpr = operatorExpr.getExprList().get(1);
+                String patternStr = ExpressionUtils.getStringLiteral(patternExpr);
+                // 'prefix%' with no other wildcard in the prefix -> rewrite to a range predicate
+                if (patternStr != null && patternStr.endsWith(PERCENT) && patternStr.length() > 1) {
+                    String prefix = patternStr.substring(0, patternStr.length() - 1);
+                    if (!prefix.contains(PERCENT) && !prefix.contains(UNDERSCORE)) {
+                        return convertLikeToRange(operatorExpr, target, prefix);
+                    }
+                }
+                // no wildcards at all -> the pattern can only match itself; rewrite to equality
+                if (patternStr != null && !patternStr.contains(PERCENT) && !patternStr.contains(UNDERSCORE)) {
+                    return createOperatorExpression(OperatorType.EQ, target, patternExpr, operatorExpr.getHints(),
+                            operatorExpr.getSourceLocation());
+                }
+                return createLikeExpression(operatorExpr);
             case NOT_LIKE:
                 CallExpr notLikeExpr = new CallExpr(new FunctionSignature(BuiltinFunctions.NOT),
                         new ArrayList<>(Collections.singletonList(likeExpr)));
@@ -156,19 +174,8 @@
         Expression target = operatorExpr.getExprList().get(0);
         Expression left = operatorExpr.getExprList().get(1);
         Expression right = operatorExpr.getExprList().get(2);
+        Expression andExpr = createRangeExpression(target, OperatorType.GE, left, right, OperatorType.LE, operatorExpr);
 
-        // Creates the expression target >= left.
-        Expression leftComparison = createOperatorExpression(OperatorType.GE, target, left, operatorExpr.getHints(),
-                operatorExpr.getSourceLocation());
-        // Creates the expression target <= right.
-        Expression targetCopy = (Expression) SqlppRewriteUtil.deepCopy(target);
-
-        // remove any selectivity hints from operatorExpr; do not want to duplicate those hints
-        Expression rightComparison = createOperatorExpression(OperatorType.LE, targetCopy, right,
-                removeSelectivityHints(operatorExpr), operatorExpr.getSourceLocation());
-
-        Expression andExpr = createOperatorExpression(OperatorType.AND, leftComparison, rightComparison, null,
-                operatorExpr.getSourceLocation());
         switch (opType) {
             case BETWEEN:
                 return andExpr;
@@ -261,4 +268,37 @@
         }
         return comparison;
     }
+
+    private Expression createRangeExpression(Expression target, OperatorType leftOp, Expression left, Expression right,
+            OperatorType rightOp, OperatorExpr operatorExpr) throws CompilationException {
+        Expression leftComparison = createOperatorExpression(leftOp, target, left, operatorExpr.getHints(),
+                operatorExpr.getSourceLocation());
+        Expression targetCopy = (Expression) SqlppRewriteUtil.deepCopy(target);
+
+        // remove any selectivity hints from operatorExpr; do not want to duplicate those hints
+        Expression rightComparison = createOperatorExpression(rightOp, targetCopy, right,
+                removeSelectivityHints(operatorExpr), operatorExpr.getSourceLocation());
+
+        Expression andExpr = createOperatorExpression(OperatorType.AND, leftComparison, rightComparison, null,
+                operatorExpr.getSourceLocation());
+        return andExpr;
+    }
+
+    private Expression convertLikeToRange(OperatorExpr operatorExpr, Expression target, String prefix)
+            throws CompilationException {
+        // upper bound: the prefix with its last code point incremented, e.g. "abc" -> "abd"
+        int lastCodePoint = prefix.codePointAt(prefix.length() - 1);
+        String incrementedLastChar = new String(Character.toChars(lastCodePoint + 1));
+        String incrementedStr = prefix.substring(0, prefix.length() - 1) + incrementedLastChar;
+        Expression left = new LiteralExpr(new StringLiteral(prefix));
+        Expression right = new LiteralExpr(new StringLiteral(incrementedStr));
+        return createRangeExpression(target, OperatorType.GE, left, right, OperatorType.LT, operatorExpr);
+    }
+
+    private static CallExpr createLikeExpression(OperatorExpr operatorExpr) {
+        CallExpr likeExpr =
+                new CallExpr(new FunctionSignature(BuiltinFunctions.STRING_LIKE), operatorExpr.getExprList());
+        likeExpr.addHints(operatorExpr.getHints());
+        likeExpr.setSourceLocation(operatorExpr.getSourceLocation());
+        return likeExpr;
+    }
 }
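
The net effect of the LIKE rewrite: a constant pattern with a single trailing % becomes a half-open range (x LIKE "ab%" turns into x >= "ab" AND x < "ac"), a constant pattern with no wildcards becomes plain equality, and everything else still compiles to string-like(). A self-contained sketch of the upper-bound computation, assuming the prefix ends in a BMP character, as convertLikeToRange above effectively does:

    public final class LikePrefixRangeSketch {
        /** Smallest string strictly greater than all strings starting with prefix. */
        static String upperBound(String prefix) {
            // same idea as convertLikeToRange: increment the last code point
            int last = prefix.codePointAt(prefix.length() - 1);
            return prefix.substring(0, prefix.length() - 1) + new String(Character.toChars(last + 1));
        }

        public static void main(String[] args) {
            // x LIKE "ab%"  ~>  x >= "ab" AND x < upperBound("ab")
            System.out.println(upperBound("ab")); // prints "ac"
        }
    }
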
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppLoadAccessedDataset.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppLoadAccessedDataset.java
index 2f3323f..1b1cf83 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppLoadAccessedDataset.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppLoadAccessedDataset.java
@@ -84,8 +84,13 @@
                 }
             }
 
-            context.getMetadataProvider()
-                    .addAccessedEntity(new EntityDetails(databaseName, dataverseName, datasetName, entityType));
+            if (entityType == EntityDetails.EntityType.VIEW) {
+                context.getMetadataProvider()
+                        .addAccessedEntity(EntityDetails.newView(databaseName, dataverseName, datasetName));
+            } else {
+                context.getMetadataProvider()
+                        .addAccessedEntity(EntityDetails.newDataset(databaseName, dataverseName, datasetName));
+            }
 
         } else {
             FunctionSignature signature = expression.getFunctionSignature();
@@ -93,9 +98,9 @@
             if (declaredFunctions.containsKey(signature)) {
                 return;
             }
-            String functionName = signature.getName() + "(" + signature.getArity() + ")";
-            context.getMetadataProvider().addAccessedEntity(new EntityDetails(signature.getDatabaseName(),
-                    signature.getDataverseName(), functionName, EntityDetails.EntityType.FUNCTION));
+            String functionName = EntityDetails.getFunctionNameWithArity(signature.getName(), signature.getArity());
+            context.getMetadataProvider().addAccessedEntity(EntityDetails.newFunction(signature.getDatabaseName(),
+                    signature.getDataverseName(), functionName, signature.getArity()));
         }
     }
 }
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj b/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
index da5f0ab..2ec743d 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
+++ b/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
@@ -2936,6 +2936,9 @@
   Namespace namespace = nameComponents == null ? null : nameComponents.first;
   String datasetName = nameComponents == null ? null : nameComponents.second.getValue();
   List<Expression> pathExprs;
+  RecordTypeDefinition recordTypeDefinition = null;
+  TypeExpression typeExpr = null;
+  boolean isRecordTypeDefinition = false;
 
   List<Expression> partitionExprs = new ArrayList<Expression>();
   Map<Integer, VariableExpr> partitionVarExprs = new HashMap<Integer, VariableExpr>();
@@ -2947,6 +2950,21 @@
   <TO> adapterName = AdapterName()
   <PATH> <LEFTPAREN> pathExprs = ExpressionList() <RIGHTPAREN>
   (CopyToOverClause(partitionExprs, partitionVarExprs, orderbyList, orderbyModifierList, orderbyNullModifierList))?
+  (<TYPE> <LEFTPAREN>
+    {
+      recordTypeDefinition = RecordTypeDef();
+      isRecordTypeDefinition = true;
+    }
+    <RIGHTPAREN>) ?
+  (<AS>
+    {
+      if (!isRecordTypeDefinition) {
+        typeExpr = DatasetRecordTypeSpecification(false, null);
+      } else {
+        throw new SqlppParseException(getSourceLocation(token),
+          "Syntax error: both 'TYPE()' and 'AS()' were provided. Please use only one of them.");
+      }
+    }
+  )?
   <WITH> withRecord = RecordConstructor()
     {
        ExternalDetailsDecl edd = new ExternalDetailsDecl();
@@ -2961,8 +2979,7 @@
           usedAlias = new VariableExpr(SqlppVariableUtil.toInternalVariableIdentifier(datasetName));
        }
 
-       CopyToStatement stmt = new CopyToStatement(namespace, datasetName, query, usedAlias, edd, pathExprs,
-            partitionExprs, partitionVarExprs, orderbyList, orderbyModifierList, orderbyNullModifierList, getVarCounter());
+       CopyToStatement stmt = new CopyToStatement(namespace, datasetName, query, usedAlias, edd, pathExprs,
+            partitionExprs, partitionVarExprs, orderbyList, orderbyModifierList, orderbyNullModifierList,
+            getVarCounter(), typeExpr, recordTypeDefinition);
        return addSourceLocation(stmt, startToken);
     }
 }
@@ -4407,7 +4424,7 @@
   (
     <MUL> { star = true; }
     |
-    ( expr1 = Expression() ( <COLON> { slice = true; } ( expr2 = Expression() )? )? )
+    ( (expr1 = Expression()) ? ( <COLON> { slice = true; } ( expr2 = Expression() )? )? )
   )
   <RIGHTBRACKET>
   {
@@ -4418,10 +4435,14 @@
       ensureIntegerLiteral( (LiteralExpr) expr2, "Index");
     }
     AbstractAccessor resultAccessor;
-    if (slice) {
+
+    if (star) {
+      resultAccessor = new IndexAccessor(inputExpr, IndexAccessor.IndexKind.STAR, null);
+    } else if (slice || expr1 == null) {
+      if (expr1 == null) {
+        expr1 = new LiteralExpr(new LongIntegerLiteral(0L));
+      }
       resultAccessor = new ListSliceExpression(inputExpr, expr1, expr2);
-    } else if (star) {
-      resultAccessor = new IndexAccessor(inputExpr, IndexAccessor.IndexKind.STAR, null);
     } else {
       resultAccessor = new IndexAccessor(inputExpr, IndexAccessor.IndexKind.ELEMENT, expr1);
     }
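
The index-accessor change makes the lower bound of a slice optional, defaulting it to 0, so a[:2] now parses like a[0:2]. A behavioral sketch of what that default means at evaluation time; this is illustrative only, not AsterixDB's exact out-of-range semantics:

    import java.util.List;

    public final class SliceDefaultSketch {
        // sketch: an omitted lower bound behaves as 0
        static <T> List<T> slice(List<T> a, Integer from, Integer to) {
            int f = from == null ? 0 : Math.max(from, 0); // omitted lower bound defaults to 0
            int t = to == null ? a.size() : Math.min(to, a.size());
            if (f >= t) {
                return List.of();
            }
            return a.subList(f, t);
        }

        public static void main(String[] args) {
            System.out.println(slice(List.of("a", "b", "c"), null, 2)); // [a, b]
        }
    }
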
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
index 0eb1111..8399f96 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
@@ -451,7 +451,7 @@
                 InternalDatasetDetails id = (InternalDatasetDetails) dataset.getDatasetDetails();
                 Index primaryIndex = Index.createPrimaryIndex(dataset.getDatabaseName(), dataset.getDataverseName(),
                         dataset.getDatasetName(), id.getPrimaryKey(), id.getKeySourceIndicator(),
-                        id.getPrimaryKeyType(), dataset.getPendingOp());
+                        id.getPrimaryKeyType(), dataset.getPendingOp(), dataset.getCreator());
 
                 addIndex(txnId, primaryIndex);
             }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
index aeb6d13..70bf83a 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
@@ -165,26 +165,26 @@
 
     public void dropDataset(String database, DataverseName dataverseName, String datasetName) {
         Dataset dataset = new Dataset(database, dataverseName, datasetName, null, null, null, null, null, null, null,
-                null, null, -1, MetadataUtil.PENDING_NO_OP);
+                null, null, -1, MetadataUtil.PENDING_NO_OP, null);
         droppedCache.addDatasetIfNotExists(dataset);
         logAndApply(new MetadataLogicalOperation(dataset, false));
     }
 
     public void dropIndex(String database, DataverseName dataverseName, String datasetName, String indexName) {
         Index index = new Index(database, dataverseName, datasetName, indexName, null, null, false, false,
-                MetadataUtil.PENDING_NO_OP);
+                MetadataUtil.PENDING_NO_OP, null);
         droppedCache.addIndexIfNotExists(index);
         logAndApply(new MetadataLogicalOperation(index, false));
     }
 
     public void dropDatabase(String databaseName) {
-        Database database = new Database(databaseName, false, MetadataUtil.PENDING_NO_OP);
+        Database database = new Database(databaseName, false, MetadataUtil.PENDING_NO_OP, null);
         droppedCache.addDatabaseIfNotExists(database);
         logAndApply(new MetadataLogicalOperation(database, false));
     }
 
     public void dropDataverse(String database, DataverseName dataverseName) {
-        Dataverse dataverse = new Dataverse(database, dataverseName, null, MetadataUtil.PENDING_NO_OP);
+        Dataverse dataverse = new Dataverse(database, dataverseName, null, MetadataUtil.PENDING_NO_OP, null);
         droppedCache.addDataverseIfNotExists(dataverse);
         logAndApply(new MetadataLogicalOperation(dataverse, false));
     }
@@ -203,7 +203,7 @@
 
     public void dropFunction(FunctionSignature signature) {
         Function function = new Function(signature, null, null, null, null, null, null, null, null, null, null, false,
-                false, null, null);
+                false, null, null, null);
         droppedCache.addFunctionIfNotExists(function);
         logAndApply(new MetadataLogicalOperation(function, false));
     }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
index 71458f9..901ffdf 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -71,6 +71,7 @@
 import org.apache.asterix.metadata.entities.Node;
 import org.apache.asterix.metadata.entities.NodeGroup;
 import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.BuiltinTypeMap;
 import org.apache.asterix.om.types.IAType;
@@ -221,13 +222,12 @@
             IDatasetDetails id = new InternalDatasetDetails(FileStructure.BTREE, PartitioningStrategy.HASH,
                     indexes[i].getPartitioningExpr(), indexes[i].getPartitioningExpr(), null,
                     indexes[i].getPartitioningExprType(), false, null, null);
-            MetadataManager.INSTANCE.addDataset(mdTxnCtx,
-                    new Dataset(indexes[i].getDatabaseName(), indexes[i].getDataverseName(),
-                            indexes[i].getIndexedDatasetName(), indexes[i].getDatabaseName(),
-                            indexes[i].getDataverseName(), indexes[i].getPayloadRecordType().getTypeName(),
-                            indexes[i].getNodeGroupName(), StorageConstants.DEFAULT_COMPACTION_POLICY_NAME,
-                            StorageConstants.DEFAULT_COMPACTION_POLICY_PROPERTIES, id, new HashMap<>(),
-                            DatasetType.INTERNAL, indexes[i].getDatasetId().getId(), MetadataUtil.PENDING_NO_OP));
+            MetadataManager.INSTANCE.addDataset(mdTxnCtx, new Dataset(indexes[i].getDatabaseName(),
+                    indexes[i].getDataverseName(), indexes[i].getIndexedDatasetName(), indexes[i].getDatabaseName(),
+                    indexes[i].getDataverseName(), indexes[i].getPayloadRecordType().getTypeName(),
+                    indexes[i].getNodeGroupName(), StorageConstants.DEFAULT_COMPACTION_POLICY_NAME,
+                    StorageConstants.DEFAULT_COMPACTION_POLICY_PROPERTIES, id, new HashMap<>(), DatasetType.INTERNAL,
+                    indexes[i].getDatasetId().getId(), MetadataUtil.PENDING_NO_OP, Creator.DEFAULT_CREATOR));
         }
         if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Finished inserting initial datasets.");
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java
index 7b33682..f11ab01 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java
@@ -24,6 +24,7 @@
 import org.apache.asterix.metadata.entities.Database;
 import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Dataverse;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.om.utils.RecordUtil;
 import org.apache.asterix.runtime.formats.NonTaggedDataFormat;
 
@@ -31,18 +32,19 @@
 
     //--------------------------------------- Databases ----------------------------------------//
     public static final Database SYSTEM_DATABASE =
-            new Database(MetadataConstants.SYSTEM_DATABASE, true, MetadataUtil.PENDING_NO_OP);
+            new Database(MetadataConstants.SYSTEM_DATABASE, true, MetadataUtil.PENDING_NO_OP, Creator.DEFAULT_CREATOR);
 
-    public static final Database DEFAULT_DATABASE =
-            new Database(MetadataConstants.DEFAULT_DATABASE, false, MetadataUtil.PENDING_NO_OP);
+    public static final Database DEFAULT_DATABASE = new Database(MetadataConstants.DEFAULT_DATABASE, false,
+            MetadataUtil.PENDING_NO_OP, Creator.DEFAULT_CREATOR);
 
     //--------------------------------------- Dataverses ----------------------------------------//
     public static final Dataverse METADATA_DATAVERSE =
             new Dataverse(MetadataConstants.SYSTEM_DATABASE, MetadataConstants.METADATA_DATAVERSE_NAME,
-                    NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT, MetadataUtil.PENDING_NO_OP);
+                    NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT, MetadataUtil.PENDING_NO_OP, Creator.DEFAULT_CREATOR);
 
-    public static final Dataverse DEFAULT_DATAVERSE = new Dataverse(MetadataConstants.DEFAULT_DATABASE,
-            MetadataConstants.DEFAULT_DATAVERSE_NAME, NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
+    public static final Dataverse DEFAULT_DATAVERSE =
+            new Dataverse(MetadataConstants.DEFAULT_DATABASE, MetadataConstants.DEFAULT_DATAVERSE_NAME,
+                    NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP, Creator.DEFAULT_CREATOR);
 
     //--------------------------------------- Datatypes -----------------------------------------//
     public static final Datatype ANY_OBJECT_DATATYPE =
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
index c35c03a..338e00d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
@@ -130,6 +130,11 @@
     public static final String FIELD_NAME_FULL_TEXT_FILTER_PIPELINE = "FullTextFilterPipeline";
     public static final String FIELD_NAME_FULL_TEXT_STOPWORD_LIST = "StopwordList";
 
+    // open "Creator" field and its sub-field names
+    public static final String FIELD_NAME_CREATOR_NAME = "Name";
+    public static final String FIELD_NAME_CREATOR_UUID = "Uuid";
+    public static final String CREATOR_ARECORD_FIELD_NAME = "Creator";
+
     //---------------------------------- Record Types Creation ----------------------------------//
     //--------------------------------------- Properties ----------------------------------------//
     public static final int PROPERTIES_NAME_FIELD_INDEX = 0;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/IExternalWriteDataSink.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/IExternalWriteDataSink.java
new file mode 100644
index 0000000..64f8d6d
--- /dev/null
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/IExternalWriteDataSink.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.metadata.declared;
+
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.hyracks.algebricks.core.algebra.metadata.IWriteDataSink;
+import org.apache.hyracks.api.exceptions.SourceLocation;
+
+public interface IExternalWriteDataSink extends IWriteDataSink {
+    ARecordType getItemType();
+
+    SourceLocation getSourceLoc();
+}
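
IExternalWriteDataSink lets the writer pipeline recover the COPY TO item type and source location from a generic IWriteDataSink. A hedged sketch of the kind of downcast a consumer such as ExternalWriterProvider might perform; the actual consumer code is outside this hunk:

    import org.apache.asterix.metadata.declared.IExternalWriteDataSink;
    import org.apache.asterix.om.types.ARecordType;
    import org.apache.hyracks.algebricks.core.algebra.metadata.IWriteDataSink;

    final class WriteSinkItemType {
        // returns null when the sink carries no item type (a plain IWriteDataSink)
        static ARecordType itemTypeOf(IWriteDataSink sink) {
            return sink instanceof IExternalWriteDataSink ? ((IExternalWriteDataSink) sink).getItemType() : null;
        }
    }
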
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
index cc456e5..c152853 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
@@ -100,7 +100,6 @@
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.IAType;
-import org.apache.asterix.om.utils.ConstantExpressionUtil;
 import org.apache.asterix.om.utils.NonTaggedFormatUtil;
 import org.apache.asterix.runtime.base.AsterixTupleFilterFactory;
 import org.apache.asterix.runtime.formats.FormatUtils;
@@ -110,9 +109,6 @@
 import org.apache.asterix.runtime.operators.LSMSecondaryInsertDeleteWithNestedPlanOperatorDescriptor;
 import org.apache.asterix.runtime.operators.LSMSecondaryUpsertOperatorDescriptor;
 import org.apache.asterix.runtime.operators.LSMSecondaryUpsertWithNestedPlanOperatorDescriptor;
-import org.apache.asterix.runtime.writer.ExternalFileWriterFactory;
-import org.apache.asterix.runtime.writer.IExternalFileWriterFactory;
-import org.apache.asterix.runtime.writer.IExternalPrinterFactory;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -143,7 +139,6 @@
 import org.apache.hyracks.algebricks.runtime.base.AlgebricksPipeline;
 import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
 import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
-import org.apache.hyracks.algebricks.runtime.operators.writer.SinkExternalWriterRuntimeFactory;
 import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
@@ -794,17 +789,9 @@
             IScalarEvaluatorFactory dynamicPathEvalFactory, ILogicalExpression staticPathExpr,
             SourceLocation pathSourceLocation, IWriteDataSink sink, RecordDescriptor inputDesc, Object sourceType)
             throws AlgebricksException {
-        String staticPath = staticPathExpr != null ? ConstantExpressionUtil.getStringConstant(staticPathExpr) : null;
-        IExternalFileWriterFactory fileWriterFactory =
-                ExternalWriterProvider.createWriterFactory(appCtx, sink, staticPath, pathSourceLocation);
-        fileWriterFactory.validate();
-        String fileExtension = ExternalWriterProvider.getFileExtension(sink);
-        int maxResult = ExternalWriterProvider.getMaxResult(sink);
-        IExternalPrinterFactory printerFactory = ExternalWriterProvider.createPrinter(appCtx, sink, sourceType);
-        ExternalFileWriterFactory writerFactory = new ExternalFileWriterFactory(fileWriterFactory, printerFactory,
-                fileExtension, maxResult, dynamicPathEvalFactory, staticPath, pathSourceLocation);
-        SinkExternalWriterRuntimeFactory runtime = new SinkExternalWriterRuntimeFactory(sourceColumn, partitionColumns,
-                partitionComparatorFactories, inputDesc, writerFactory);
+        IPushRuntimeFactory runtime = ExternalWriterProvider.getWriteFileRuntime(appCtx, sink, sourceType,
+                staticPathExpr, pathSourceLocation, dynamicPathEvalFactory, inputDesc, sourceColumn, partitionColumns,
+                partitionComparatorFactories);
         return new Pair<>(runtime, null);
     }
 
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/WriteDataSink.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/WriteDataSink.java
index 753ac54..d1667bf 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/WriteDataSink.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/WriteDataSink.java
@@ -21,20 +21,39 @@
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.core.algebra.metadata.IWriteDataSink;
+import org.apache.hyracks.api.exceptions.SourceLocation;
 
-public class WriteDataSink implements IWriteDataSink {
+public class WriteDataSink implements IExternalWriteDataSink {
     private final String adapterName;
     private final Map<String, String> configuration;
+    private ARecordType itemType;
+    private SourceLocation sourceLoc;
 
-    public WriteDataSink(String adapterName, Map<String, String> configuration) {
+    public WriteDataSink(String adapterName, Map<String, String> configuration, ARecordType itemType,
+            SourceLocation sourceLoc) {
         this.adapterName = adapterName;
         this.configuration = configuration;
+        this.itemType = itemType;
+        this.sourceLoc = sourceLoc;
     }
 
     private WriteDataSink(WriteDataSink writeDataSink) {
         this.adapterName = writeDataSink.getAdapterName();
         this.configuration = new HashMap<>(writeDataSink.configuration);
+        this.itemType = writeDataSink.itemType;
+        this.sourceLoc = writeDataSink.sourceLoc;
+    }
+
+    @Override
+    public ARecordType getItemType() {
+        return itemType;
+    }
+
+    @Override
+    public SourceLocation getSourceLoc() {
+        return sourceLoc;
     }
 
     @Override
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Database.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Database.java
index 8be93e3..6336596 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Database.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Database.java
@@ -23,6 +23,7 @@
 
 import org.apache.asterix.metadata.MetadataCache;
 import org.apache.asterix.metadata.api.IMetadataEntity;
+import org.apache.asterix.metadata.utils.Creator;
 
 /**
  * Metadata describing a database.
@@ -34,11 +35,13 @@
     private final String databaseName;
     private final boolean isSystemDatabase;
     private final int pendingOp;
+    private final Creator creator;
 
-    public Database(String databaseName, boolean isSystemDatabase, int pendingOp) {
+    public Database(String databaseName, boolean isSystemDatabase, int pendingOp, Creator creator) {
         this.databaseName = databaseName;
         this.isSystemDatabase = isSystemDatabase;
         this.pendingOp = pendingOp;
+        this.creator = creator;
     }
 
     public String getDatabaseName() {
@@ -53,6 +56,10 @@
         return pendingOp;
     }
 
+    public Creator getCreator() {
+        return creator;
+    }
+
     @Override
     public Database addToCache(MetadataCache cache) {
         return cache.addDatabaseIfNotExists(this);
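
Creator itself (org.apache.asterix.metadata.utils.Creator) is not part of this section of the diff; from its call sites here it is a small value object with a name and a uuid, a DEFAULT_CREATOR constant, and a createOrDefault(ARecord) factory that reads the open "Creator" field of a metadata record and falls back to the default. A plausible shape, inferred from those call sites and possibly differing from the real class:

    public final class Creator {
        // placeholder values; the real defaults are defined in the actual class
        public static final Creator DEFAULT_CREATOR = new Creator("@sys", "");

        private final String name;
        private final String uuid;

        public Creator(String name, String uuid) {
            this.name = name;
            this.uuid = uuid;
        }

        public String getName() {
            return name;
        }

        public String getUuid() {
            return uuid;
        }
    }
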
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
index 132efbc..cae1385 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
@@ -63,6 +63,7 @@
 import org.apache.asterix.metadata.declared.ArrayBTreeResourceFactoryProvider;
 import org.apache.asterix.metadata.declared.BTreeResourceFactoryProvider;
 import org.apache.asterix.metadata.declared.MetadataProvider;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.metadata.utils.DatasetUtil;
 import org.apache.asterix.metadata.utils.IndexUtil;
 import org.apache.asterix.metadata.utils.InvertedIndexResourceFactoryProvider;
@@ -166,15 +167,16 @@
     private final String compressionScheme;
     private final DatasetFullyQualifiedName datasetFullyQualifiedName;
     private final DatasetFormatInfo datasetFormatInfo;
+    private final Creator creator;
 
     public Dataset(String databaseName, DataverseName dataverseName, String datasetName, String recordTypeDatabaseName,
             DataverseName recordTypeDataverseName, String recordTypeName, String nodeGroupName, String compactionPolicy,
             Map<String, String> compactionPolicyProperties, IDatasetDetails datasetDetails, Map<String, String> hints,
-            DatasetType datasetType, int datasetId, int pendingOp) {
+            DatasetType datasetType, int datasetId, int pendingOp, Creator creator) {
         this(databaseName, dataverseName, datasetName, recordTypeDatabaseName, recordTypeDataverseName, recordTypeName,
                 null, /*metaTypeDataverseName*/null, /*metaTypeName*/null, nodeGroupName, compactionPolicy,
                 compactionPolicyProperties, datasetDetails, hints, datasetType, datasetId, pendingOp,
-                CompressionManager.NONE, DatasetFormatInfo.SYSTEM_DEFAULT);
+                CompressionManager.NONE, DatasetFormatInfo.SYSTEM_DEFAULT, creator);
     }
 
     public Dataset(String databaseName, DataverseName dataverseName, String datasetName, String itemTypeDatabaseName,
@@ -182,11 +184,11 @@
             DataverseName metaItemTypeDataverseName, String metaItemTypeName, String nodeGroupName,
             String compactionPolicy, Map<String, String> compactionPolicyProperties, IDatasetDetails datasetDetails,
             Map<String, String> hints, DatasetType datasetType, int datasetId, int pendingOp, String compressionScheme,
-            DatasetFormatInfo datasetFormatInfo) {
+            DatasetFormatInfo datasetFormatInfo, Creator creator) {
         this(databaseName, dataverseName, datasetName, itemTypeDatabaseName, itemTypeDataverseName, itemTypeName,
                 metaItemTypeDatabaseName, metaItemTypeDataverseName, metaItemTypeName, nodeGroupName, compactionPolicy,
                 compactionPolicyProperties, datasetDetails, hints, datasetType, datasetId, pendingOp, 0L,
-                compressionScheme, datasetFormatInfo);
+                compressionScheme, datasetFormatInfo, creator);
     }
 
     public Dataset(Dataset dataset) {
@@ -195,7 +197,7 @@
                 dataset.metaTypeDataverseName, dataset.metaTypeName, dataset.nodeGroupName,
                 dataset.compactionPolicyFactory, dataset.compactionPolicyProperties, dataset.datasetDetails,
                 dataset.hints, dataset.datasetType, dataset.datasetId, dataset.pendingOp, dataset.rebalanceCount,
-                dataset.compressionScheme, dataset.datasetFormatInfo);
+                dataset.compressionScheme, dataset.datasetFormatInfo, dataset.creator);
     }
 
     public Dataset(String databaseName, DataverseName dataverseName, String datasetName, String itemTypeDatabaseName,
@@ -203,7 +205,7 @@
             DataverseName metaItemTypeDataverseName, String metaItemTypeName, String nodeGroupName,
             String compactionPolicy, Map<String, String> compactionPolicyProperties, IDatasetDetails datasetDetails,
             Map<String, String> hints, DatasetType datasetType, int datasetId, int pendingOp, long rebalanceCount,
-            String compressionScheme, DatasetFormatInfo datasetFormatInfo) {
+            String compressionScheme, DatasetFormatInfo datasetFormatInfo, Creator creator) {
         this.databaseName = Objects.requireNonNull(databaseName);
         this.dataverseName = dataverseName;
         this.datasetName = datasetName;
@@ -225,6 +227,7 @@
         this.compressionScheme = compressionScheme;
         this.datasetFullyQualifiedName = new DatasetFullyQualifiedName(databaseName, dataverseName, datasetName);
         this.datasetFormatInfo = datasetFormatInfo;
+        this.creator = creator;
     }
 
     public String getDatabaseName() {
@@ -301,6 +304,10 @@
         return rebalanceCount;
     }
 
+    public Creator getCreator() {
+        return creator;
+    }
+
     public boolean hasMetaPart() {
         return metaTypeDataverseName != null && metaTypeName != null;
     }
@@ -395,7 +402,7 @@
                             getMetaItemTypeDataverseName(), getMetaItemTypeName(), getNodeGroupName(),
                             getCompactionPolicy(), getCompactionPolicyProperties(), getDatasetDetails(), getHints(),
                             getDatasetType(), getDatasetId(), MetadataUtil.PENDING_DROP_OP, getCompressionScheme(),
-                            getDatasetFormatInfo()));
+                            getDatasetFormatInfo(), creator));
 
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
             bActiveTxn.setValue(false);
@@ -856,7 +863,7 @@
                 this.metaTypeDataverseName, this.metaTypeName, targetNodeGroupName, this.compactionPolicyFactory,
                 this.compactionPolicyProperties, this.datasetDetails, this.hints, this.datasetType,
                 DatasetIdFactory.generateAlternatingDatasetId(this.datasetId), this.pendingOp, this.rebalanceCount + 1,
-                this.compressionScheme, this.datasetFormatInfo);
+                this.compressionScheme, this.datasetFormatInfo, creator);
     }
 
     // Gets an array of partition numbers for this dataset.
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
index bba1763..5b52be4 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
@@ -24,6 +24,7 @@
 import org.apache.asterix.common.metadata.DataverseName;
 import org.apache.asterix.metadata.MetadataCache;
 import org.apache.asterix.metadata.api.IMetadataEntity;
+import org.apache.asterix.metadata.utils.Creator;
 
 /**
  * Metadata describing a dataverse.
@@ -35,12 +36,14 @@
     private final DataverseName dataverseName;
     private final String dataFormat;
     private final int pendingOp;
+    private final Creator creator;
 
-    public Dataverse(String databaseName, DataverseName dataverseName, String format, int pendingOp) {
+    public Dataverse(String databaseName, DataverseName dataverseName, String format, int pendingOp, Creator creator) {
         this.databaseName = Objects.requireNonNull(databaseName);
         this.dataverseName = dataverseName;
         this.dataFormat = format;
         this.pendingOp = pendingOp;
+        this.creator = creator;
     }
 
     public String getDatabaseName() {
@@ -59,6 +62,10 @@
         return pendingOp;
     }
 
+    public Creator getCreator() {
+        return creator;
+    }
+
     @Override
     public Dataverse addToCache(MetadataCache cache) {
         return cache.addDataverseIfNotExists(this);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/EntityDetails.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/EntityDetails.java
index a341a39..c7e771b 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/EntityDetails.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/EntityDetails.java
@@ -20,26 +20,78 @@
 
 import org.apache.asterix.common.metadata.DataverseName;
 
+/**
+ * Describes a metadata entity (dataset, view, function, etc.) accessed by a statement.
+ * Instances are obtained through the static factory methods below.
+ */
 public class EntityDetails {
 
     public enum EntityType {
         DATASET,
         VIEW,
-        FUNCTION
+        FUNCTION,
+        DATABASE,
+        DATAVERSE,
+        SYNONYM,
+        INDEX
     }
 
     private final String databaseName;
     private final DataverseName dataverseName;
     private final String entityName;
     private final EntityType entityType;
+    // Number of arguments the function accepts; meaningful only for FUNCTION entities.
+    private int functionArity;
 
-    public EntityDetails(String databaseName, DataverseName dataverseName, String entityName, EntityType entityType) {
+    private EntityDetails(String databaseName, DataverseName dataverseName, String entityName, EntityType entityType) {
         this.databaseName = databaseName;
         this.dataverseName = dataverseName;
         this.entityName = entityName;
         this.entityType = entityType;
     }
 
+    private EntityDetails(String databaseName, DataverseName dataverseName, String entityName, EntityType entityType,
+            int functionArity) {
+        this.databaseName = databaseName;
+        this.dataverseName = dataverseName;
+        this.entityName = entityName;
+        this.entityType = entityType;
+        this.functionArity = functionArity;
+    }
+
+    public static EntityDetails newDatabase(String databaseName) {
+        return new EntityDetails(databaseName, null, null, EntityType.DATABASE);
+    }
+
+    public static EntityDetails newDataverse(String databaseName, DataverseName dataverseName) {
+        return new EntityDetails(databaseName, dataverseName, null, EntityType.DATAVERSE);
+    }
+
+    public static EntityDetails newDataset(String databaseName, DataverseName dataverseName, String datasetName) {
+        return new EntityDetails(databaseName, dataverseName, datasetName, EntityType.DATASET);
+    }
+
+    public static EntityDetails newView(String databaseName, DataverseName dataverseName, String viewName) {
+        return new EntityDetails(databaseName, dataverseName, viewName, EntityType.VIEW);
+    }
+
+    public static EntityDetails newFunction(String databaseName, DataverseName dataverseName, String functionName,
+            int functionArity) {
+        return new EntityDetails(databaseName, dataverseName, functionName, EntityType.FUNCTION, functionArity);
+    }
+
+    public static EntityDetails newSynonym(String databaseName, DataverseName dataverseName, String synonymName) {
+        return new EntityDetails(databaseName, dataverseName, synonymName, EntityType.SYNONYM);
+    }
+
+    public static EntityDetails newIndex(String databaseName, DataverseName dataverseName, String indexName) {
+        return new EntityDetails(databaseName, dataverseName, indexName, EntityType.INDEX);
+    }
+
+    public static EntityDetails newExtension(String extensionName) {
+        return new EntityDetails(null, null, extensionName, null);
+    }
+
     public String getDatabaseName() {
         return databaseName;
     }
@@ -55,4 +107,12 @@
     public EntityType getEntityType() {
         return entityType;
     }
+
+    public int getFunctionArity() {
+        return functionArity;
+    }
+
+    public static String getFunctionNameWithArity(String functionName, int functionArity) {
+        return functionName + "(" + functionArity + ")";
+    }
 }
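
With the constructor now private, call sites name the entity kind explicitly, and function arity is captured alongside the display name. Usage as exercised by the rewrites above; createSinglePartName is used here only for illustration:

    import org.apache.asterix.common.metadata.DataverseName;
    import org.apache.asterix.metadata.entities.EntityDetails;

    public final class EntityDetailsUsage {
        public static void main(String[] args) throws Exception {
            DataverseName dv = DataverseName.createSinglePartName("Default");
            EntityDetails ds = EntityDetails.newDataset("db", dv, "orders");
            EntityDetails fn = EntityDetails.newFunction("db", dv,
                    EntityDetails.getFunctionNameWithArity("myFn", 2), 2);
            System.out.println(ds.getEntityType() + " " + fn.getEntityName()); // DATASET myFn(2)
        }
    }
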
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
index 82c9514..3c1515b 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
@@ -29,6 +29,7 @@
 import org.apache.asterix.common.metadata.DependencyFullyQualifiedName;
 import org.apache.asterix.metadata.MetadataCache;
 import org.apache.asterix.metadata.api.IMetadataEntity;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.om.types.TypeSignature;
 
 public class Function implements IMetadataEntity<Function> {
@@ -49,12 +50,13 @@
     private final Boolean nullCall; // null for SQL++ and AQL functions
     private final Map<String, String> resources;
     private final List<List<DependencyFullyQualifiedName>> dependencies;
+    private final Creator creator;
 
     public Function(FunctionSignature signature, List<String> paramNames, List<TypeSignature> paramTypes,
             TypeSignature returnType, String functionBody, String functionKind, String language,
             String libraryDatabaseName, DataverseName libraryDataverseName, String libraryName,
             List<String> externalIdentifier, Boolean nullCall, Boolean deterministic, Map<String, String> resources,
-            List<List<DependencyFullyQualifiedName>> dependencies) {
+            List<List<DependencyFullyQualifiedName>> dependencies, Creator creator) {
         this.signature = signature;
         this.paramNames = paramNames;
         this.paramTypes = paramTypes;
@@ -72,6 +74,7 @@
         this.dependencies = dependencies == null
                 ? Arrays.asList(Collections.emptyList(), Collections.emptyList(), Collections.emptyList())
                 : dependencies;
+        this.creator = creator;
     }
 
     public FunctionSignature getSignature() {
@@ -161,6 +164,10 @@
         return dependencies;
     }
 
+    public Creator getCreator() {
+        return creator;
+    }
+
     @Override
     public Function addToCache(MetadataCache cache) {
         return cache.addFunctionIfNotExists(this);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
index 66192e0..4709c13 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
@@ -34,6 +34,7 @@
 import org.apache.asterix.common.transactions.IRecoveryManager.ResourceType;
 import org.apache.asterix.metadata.MetadataCache;
 import org.apache.asterix.metadata.api.IMetadataEntity;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.metadata.utils.IndexUtil;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.AUnionType;
@@ -66,10 +67,11 @@
     private final boolean isEnforced;
     // Type of pending operations with respect to atomic DDL operation
     private int pendingOp;
+    private final Creator creator;
 
     public Index(String databaseName, DataverseName dataverseName, String datasetName, String indexName,
-            IndexType indexType, IIndexDetails indexDetails, boolean isEnforced, boolean isPrimaryIndex,
-            int pendingOp) {
+            IndexType indexType, IIndexDetails indexDetails, boolean isEnforced, boolean isPrimaryIndex, int pendingOp,
+            Creator creator) {
         boolean categoryOk = (indexType == null && indexDetails == null) || (IndexCategory
                 .of(Objects.requireNonNull(indexType)) == ((AbstractIndexDetails) Objects.requireNonNull(indexDetails))
                         .getIndexCategory());
@@ -85,26 +87,27 @@
         this.isPrimaryIndex = isPrimaryIndex;
         this.isEnforced = isEnforced;
         this.pendingOp = pendingOp;
+        this.creator = creator;
     }
 
     @Deprecated
     public Index(String database, DataverseName dataverseName, String datasetName, String indexName,
             IndexType indexType, List<List<String>> keyFieldNames, List<Integer> keyFieldSourceIndicators,
             List<IAType> keyFieldTypes, boolean overrideKeyFieldTypes, boolean isEnforced, boolean isPrimaryIndex,
-            int pendingOp, OptionalBoolean excludeUnknownKey) {
+            int pendingOp, OptionalBoolean excludeUnknownKey, Creator creator) {
         this(database, dataverseName, datasetName,
                 indexName, indexType, createSimpleIndexDetails(indexType, keyFieldNames, keyFieldSourceIndicators,
                         keyFieldTypes, overrideKeyFieldTypes, excludeUnknownKey),
-                isEnforced, isPrimaryIndex, pendingOp);
+                isEnforced, isPrimaryIndex, pendingOp, creator);
     }
 
     public static Index createPrimaryIndex(String database, DataverseName dataverseName, String datasetName,
             List<List<String>> keyFieldNames, List<Integer> keyFieldSourceIndicators, List<IAType> keyFieldTypes,
-            int pendingOp) {
-        return new Index(database, dataverseName, datasetName,
-                datasetName, IndexType.BTREE, new ValueIndexDetails(keyFieldNames, keyFieldSourceIndicators,
-                        keyFieldTypes, false, OptionalBoolean.empty(), OptionalBoolean.empty(), null, null, null),
-                false, true, pendingOp);
+            int pendingOp, Creator creator) {
+        return new Index(database, dataverseName, datasetName, datasetName, IndexType.BTREE,
+                new ValueIndexDetails(keyFieldNames, keyFieldSourceIndicators, keyFieldTypes, false,
+                        OptionalBoolean.empty(), OptionalBoolean.empty(), null, null, null),
+                false, true, pendingOp, creator);
     }
 
     public String getDatabaseName() {
@@ -155,6 +158,10 @@
         return !isPrimaryIndex();
     }
 
+    public Creator getCreator() {
+        return creator;
+    }
+
     public boolean isPrimaryKeyIndex() {
         // a primary key index has no key field names
         return indexType == IndexType.BTREE && ((ValueIndexDetails) indexDetails).keyFieldNames.isEmpty();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Synonym.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Synonym.java
index ebe780a..a355b6d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Synonym.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Synonym.java
@@ -24,6 +24,7 @@
 import org.apache.asterix.common.metadata.DataverseName;
 import org.apache.asterix.metadata.MetadataCache;
 import org.apache.asterix.metadata.api.IMetadataEntity;
+import org.apache.asterix.metadata.utils.Creator;
 
 public class Synonym implements IMetadataEntity<Synonym> {
 
@@ -40,15 +41,17 @@
     private final DataverseName objectDataverseName;
 
     private final String objectName;
+    private final Creator creator;
 
     public Synonym(String databaseName, DataverseName dataverseName, String synonymName, String objectDatabaseName,
-            DataverseName objectDataverseName, String objectName) {
+            DataverseName objectDataverseName, String objectName, Creator creator) {
         this.databaseName = Objects.requireNonNull(databaseName);
         this.dataverseName = Objects.requireNonNull(dataverseName);
         this.synonymName = Objects.requireNonNull(synonymName);
         this.objectDatabaseName = Objects.requireNonNull(objectDatabaseName);
         this.objectDataverseName = Objects.requireNonNull(objectDataverseName);
         this.objectName = Objects.requireNonNull(objectName);
+        this.creator = creator;
     }
 
     public String getDatabaseName() {
@@ -75,6 +78,10 @@
         return objectName;
     }
 
+    public Creator getCreator() {
+        return creator;
+    }
+
     @Override
     public boolean equals(Object o) {
         if (this == o)
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatabaseTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatabaseTupleTranslator.java
index 9752dc8..ef1b450 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatabaseTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatabaseTupleTranslator.java
@@ -21,13 +21,17 @@
 
 import java.util.Calendar;
 
+import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.metadata.bootstrap.DatabaseEntity;
+import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
 import org.apache.asterix.metadata.entities.Database;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.om.base.ABoolean;
 import org.apache.asterix.om.base.AInt32;
 import org.apache.asterix.om.base.AMutableInt32;
 import org.apache.asterix.om.base.ARecord;
 import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.pointables.base.DefaultOpenFieldType;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -55,7 +59,9 @@
         boolean isSystemDatabase =
                 ((ABoolean) databaseRecord.getValueByPos(databaseEntity.systemDatabaseIndex())).getBoolean();
         int pendingOp = ((AInt32) databaseRecord.getValueByPos(databaseEntity.pendingOpIndex())).getIntegerValue();
-        return new Database(databaseName, isSystemDatabase, pendingOp);
+        Creator creator = Creator.createOrDefault(databaseRecord);
+
+        return new Database(databaseName, isSystemDatabase, pendingOp, creator);
     }
 
     @Override
@@ -94,6 +100,9 @@
         int32Serde.serialize(aInt32, fieldValue.getDataOutput());
         recordBuilder.addField(databaseEntity.pendingOpIndex(), fieldValue);
 
+        // write open fields
+        writeOpenFields(database);
+
         // write the payload record
         recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
@@ -101,4 +110,39 @@
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
         return tuple;
     }
+
+    protected void writeOpenFields(Database database) throws HyracksDataException {
+        writeDatabaseCreator(database);
+    }
+
+    private void writeDatabaseCreator(Database database) throws HyracksDataException {
+        if (databaseEntity.databaseNameIndex() >= 0) {
+            Creator creatorInfo = database.getCreator();
+            RecordBuilder creatorObject = new RecordBuilder();
+            creatorObject.reset(DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_CREATOR_NAME);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            aString.setValue(creatorInfo.getName());
+            stringSerde.serialize(aString, fieldValue.getDataOutput());
+            creatorObject.addField(fieldName, fieldValue);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_CREATOR_UUID);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            aString.setValue(creatorInfo.getUuid());
+            stringSerde.serialize(aString, fieldValue.getDataOutput());
+            creatorObject.addField(fieldName, fieldValue);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.CREATOR_ARECORD_FIELD_NAME);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            creatorObject.write(fieldValue.getDataOutput(), true);
+            recordBuilder.addField(fieldName, fieldValue);
+        }
+    }
 }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
index 7613dd3..191da69 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
@@ -52,6 +52,7 @@
 import org.apache.asterix.metadata.entities.InternalDatasetDetails.FileStructure;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy;
 import org.apache.asterix.metadata.entities.ViewDetails;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.metadata.utils.DatasetUtil;
 import org.apache.asterix.om.base.ABoolean;
 import org.apache.asterix.om.base.ADateTime;
@@ -412,11 +413,12 @@
         long rebalanceCount = getRebalanceCount(datasetRecord);
         String compressionScheme = getCompressionScheme(datasetRecord);
         DatasetFormatInfo datasetFormatInfo = getDatasetFormatInfo(datasetRecord);
+        Creator creator = Creator.createOrDefault(datasetRecord);
 
         return new Dataset(databaseName, dataverseName, datasetName, itemTypeDatabaseName, typeDataverseName, typeName,
                 metaItemTypeDatabaseName, metaTypeDataverseName, metaTypeName, nodeGroupName, compactionPolicy.first,
                 compactionPolicy.second, datasetDetails, hints, datasetType, datasetId, pendingOp, rebalanceCount,
-                compressionScheme, datasetFormatInfo);
+                compressionScheme, datasetFormatInfo, creator);
     }
 
     protected Pair<String, Map<String, String>> readCompactionPolicy(DatasetType datasetType, ARecord datasetRecord) {
@@ -680,6 +682,7 @@
         writeBlockLevelStorageCompression(dataset);
         writeOpenDetails(dataset);
         writeDatasetFormatInfo(dataset);
+        writeDatasetCreator(dataset);
     }
 
     private void writeOpenDetails(Dataset dataset) throws HyracksDataException {
@@ -863,4 +866,35 @@
 
         propertyRecordBuilder.write(out, true);
     }
+
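+    // Creator open field, same nested {Name, Uuid} layout as DatabaseTupleTranslator.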
+    private void writeDatasetCreator(Dataset dataset) throws HyracksDataException {
+        if (datasetEntity.databaseNameIndex() >= 0) {
+            Creator creatorInfo = dataset.getCreator();
+            RecordBuilder creatorObject = new RecordBuilder();
+            creatorObject.reset(DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_CREATOR_NAME);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            aString.setValue(creatorInfo.getName());
+            stringSerde.serialize(aString, fieldValue.getDataOutput());
+            creatorObject.addField(fieldName, fieldValue);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_CREATOR_UUID);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            aString.setValue(creatorInfo.getUuid());
+            stringSerde.serialize(aString, fieldValue.getDataOutput());
+            creatorObject.addField(fieldName, fieldValue);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.CREATOR_ARECORD_FIELD_NAME);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            creatorObject.write(fieldValue.getDataOutput(), true);
+            recordBuilder.addField(fieldName, fieldValue);
+        }
+    }
 }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
index 42fcfbb..10c4b07 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
@@ -21,14 +21,18 @@
 
 import java.util.Calendar;
 
+import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.common.metadata.DataverseName;
 import org.apache.asterix.common.metadata.MetadataUtil;
 import org.apache.asterix.metadata.bootstrap.DataverseEntity;
+import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
 import org.apache.asterix.metadata.entities.Dataverse;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.om.base.AInt32;
 import org.apache.asterix.om.base.AMutableInt32;
 import org.apache.asterix.om.base.ARecord;
 import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.pointables.base.DefaultOpenFieldType;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -55,6 +59,7 @@
                 ((AString) dataverseRecord.getValueByPos(dataverseEntity.dataverseNameIndex())).getStringValue();
         DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
         int databaseNameIndex = dataverseEntity.databaseNameIndex();
+
         String databaseName;
         if (databaseNameIndex >= 0) {
             databaseName = ((AString) dataverseRecord.getValueByPos(databaseNameIndex)).getStringValue();
@@ -63,8 +68,9 @@
         }
         String format = ((AString) dataverseRecord.getValueByPos(dataverseEntity.dataFormatIndex())).getStringValue();
         int pendingOp = ((AInt32) dataverseRecord.getValueByPos(dataverseEntity.pendingOpIndex())).getIntegerValue();
+        Creator creator = Creator.createOrDefault(dataverseRecord);
 
-        return new Dataverse(databaseName, dataverseName, format, pendingOp);
+        return new Dataverse(databaseName, dataverseName, format, pendingOp, creator);
     }
 
     @Override
@@ -115,6 +121,9 @@
         int32Serde.serialize(aInt32, fieldValue.getDataOutput());
         recordBuilder.addField(dataverseEntity.pendingOpIndex(), fieldValue);
 
+        // write open fields
+        writeOpenFields(dataverse);
+
         // write record
         recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
@@ -122,4 +131,39 @@
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
         return tuple;
     }
+
+    protected void writeOpenFields(Dataverse dataverse) throws HyracksDataException {
+        writeDataverseCreator(dataverse);
+    }
+
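+    // Creator open field for dataverses, mirroring the other tuple translators.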
+    private void writeDataverseCreator(Dataverse dataverse) throws HyracksDataException {
+        if (dataverseEntity.databaseNameIndex() >= 0) {
+            RecordBuilder creatorObject = new RecordBuilder();
+            creatorObject.reset(DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
+            Creator creatorInfo = dataverse.getCreator();
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_CREATOR_NAME);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            aString.setValue(creatorInfo.getName());
+            stringSerde.serialize(aString, fieldValue.getDataOutput());
+            creatorObject.addField(fieldName, fieldValue);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_CREATOR_UUID);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            aString.setValue(creatorInfo.getUuid());
+            stringSerde.serialize(aString, fieldValue.getDataOutput());
+            creatorObject.addField(fieldName, fieldValue);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.CREATOR_ARECORD_FIELD_NAME);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            creatorObject.write(fieldValue.getDataOutput(), true);
+            recordBuilder.addField(fieldName, fieldValue);
+        }
+    }
 }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
index 8191212..2741d12 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
@@ -61,6 +61,7 @@
 import org.apache.asterix.metadata.bootstrap.FunctionEntity;
 import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
 import org.apache.asterix.metadata.entities.Function;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.om.base.ABoolean;
 import org.apache.asterix.om.base.ANull;
 import org.apache.asterix.om.base.AOrderedList;
@@ -203,10 +204,11 @@
         }
 
         FunctionSignature signature = new FunctionSignature(databaseName, dataverseName, functionName, arity);
+        Creator creator = Creator.createOrDefault(functionRecord);
 
         return new Function(signature, paramNames, paramTypes, returnType, definition, functionKind, language,
                 libraryDatabaseName, libraryDataverseName, libraryName, externalIdentifier, nullCall, deterministic,
-                resources, dependencies);
+                resources, dependencies, creator);
     }
 
     private List<TypeSignature> getParamTypes(ARecord functionRecord, String functionDatabaseName,
@@ -432,6 +434,7 @@
         writeLibrary(function);
         writeNullCall(function);
         writeDeterministic(function);
+        writeFunctionCreator(function);
     }
 
     protected void writeResources(Function function) throws HyracksDataException {
@@ -688,4 +691,35 @@
                 throw new AsterixException(ErrorCode.METADATA_ERROR, language);
         }
     }
+
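+    // Creator open field for functions, mirroring the other tuple translators.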
+    private void writeFunctionCreator(Function function) throws HyracksDataException {
+        if (functionEntity.databaseNameIndex() >= 0) {
+            Creator creatorInfo = function.getCreator();
+            RecordBuilder creatorObject = new RecordBuilder();
+            creatorObject.reset(DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_CREATOR_NAME);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            aString.setValue(creatorInfo.getName());
+            stringSerde.serialize(aString, fieldValue.getDataOutput());
+            creatorObject.addField(fieldName, fieldValue);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_CREATOR_UUID);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            aString.setValue(creatorInfo.getUuid());
+            stringSerde.serialize(aString, fieldValue.getDataOutput());
+            creatorObject.addField(fieldName, fieldValue);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.CREATOR_ARECORD_FIELD_NAME);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            creatorObject.write(fieldValue.getDataOutput(), true);
+            recordBuilder.addField(fieldName, fieldValue);
+        }
+    }
 }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
index 453ccc1..deca260 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
@@ -42,10 +42,12 @@
 import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataNode;
 import org.apache.asterix.metadata.bootstrap.IndexEntity;
+import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
 import org.apache.asterix.metadata.declared.MetadataManagerUtil;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Index;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.metadata.utils.KeyFieldTypeUtil;
 import org.apache.asterix.om.base.ABoolean;
 import org.apache.asterix.om.base.ACollectionCursor;
@@ -60,6 +62,7 @@
 import org.apache.asterix.om.base.AString;
 import org.apache.asterix.om.base.IACursor;
 import org.apache.asterix.om.base.IAObject;
+import org.apache.asterix.om.pointables.base.DefaultOpenFieldType;
 import org.apache.asterix.om.types.AOrderedListType;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
@@ -535,9 +538,10 @@
             isEnforcingKeys = ((ABoolean) indexRecord.getValueByPos(isEnforcedFieldPos)).getBoolean();
         }
         int pendingOp = ((AInt32) indexRecord.getValueByPos(indexEntity.pendingOpIndex())).getIntegerValue();
+        Creator creator = Creator.createOrDefault(indexRecord);
 
         return new Index(databaseName, dataverseName, datasetName, indexName, indexType, indexDetails, isEnforcingKeys,
-                isPrimaryIndex, pendingOp);
+                isPrimaryIndex, pendingOp, creator);
     }
 
     @Override
@@ -683,6 +687,7 @@
         writeExcludeUnknownKey(index);
         writeCast(index);
         writeSampleDetails(index);
+        writeIndexCreator(index);
     }
 
     private void writeComplexSearchKeys(Index.ArrayIndexDetails indexDetails) throws HyracksDataException {
@@ -1014,4 +1019,35 @@
             }
         }
     }
+
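+    // Creator open field for indexes, mirroring the other tuple translators.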
+    private void writeIndexCreator(Index index) throws HyracksDataException {
+        if (indexEntity.databaseNameIndex() >= 0) {
+            Creator creatorInfo = index.getCreator();
+            RecordBuilder creatorObject = new RecordBuilder();
+            creatorObject.reset(DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_CREATOR_NAME);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            aString.setValue(creatorInfo.getName());
+            stringSerde.serialize(aString, fieldValue.getDataOutput());
+            creatorObject.addField(fieldName, fieldValue);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_CREATOR_UUID);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            aString.setValue(creatorInfo.getUuid());
+            stringSerde.serialize(aString, fieldValue.getDataOutput());
+            creatorObject.addField(fieldName, fieldValue);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.CREATOR_ARECORD_FIELD_NAME);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            creatorObject.write(fieldValue.getDataOutput(), true);
+            recordBuilder.addField(fieldName, fieldValue);
+        }
+    }
 }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/SynonymTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/SynonymTupleTranslator.java
index bb3ad28..389322c 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/SynonymTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/SynonymTupleTranslator.java
@@ -19,13 +19,16 @@
 
 package org.apache.asterix.metadata.entitytupletranslators;
 
+import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.common.metadata.DataverseName;
 import org.apache.asterix.common.metadata.MetadataUtil;
 import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
 import org.apache.asterix.metadata.bootstrap.SynonymEntity;
 import org.apache.asterix.metadata.entities.Synonym;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.om.base.ARecord;
 import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.pointables.base.DefaultOpenFieldType;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -70,9 +73,10 @@
         }
 
         String objectName = ((AString) synonymRecord.getValueByPos(synonymEntity.objectNameIndex())).getStringValue();
+        Creator creator = Creator.createOrDefault(synonymRecord);
 
         return new Synonym(databaseName, dataverseName, synonymName, objectDatabaseName, objectDataverseName,
-                objectName);
+                objectName, creator);
     }
 
     @Override
@@ -149,5 +153,37 @@
             stringSerde.serialize(aString, fieldValue.getDataOutput());
             recordBuilder.addField(fieldName, fieldValue);
         }
+        writeSynonymCreator(synonym);
+    }
+
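+    // Creator open field for synonyms, mirroring the other tuple translators.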
+    private void writeSynonymCreator(Synonym synonym) throws HyracksDataException {
+        if (synonymEntity.databaseNameIndex() >= 0) {
+            Creator creatorInfo = synonym.getCreator();
+            RecordBuilder creatorObject = new RecordBuilder();
+            creatorObject.reset(DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_CREATOR_NAME);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            aString.setValue(creatorInfo.getName());
+            stringSerde.serialize(aString, fieldValue.getDataOutput());
+            creatorObject.addField(fieldName, fieldValue);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_CREATOR_UUID);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            aString.setValue(creatorInfo.getUuid());
+            stringSerde.serialize(aString, fieldValue.getDataOutput());
+            creatorObject.addField(fieldName, fieldValue);
+
+            fieldName.reset();
+            aString.setValue(MetadataRecordTypes.CREATOR_ARECORD_FIELD_NAME);
+            stringSerde.serialize(aString, fieldName.getDataOutput());
+            fieldValue.reset();
+            creatorObject.write(fieldValue.getDataOutput(), true);
+            recordBuilder.addField(fieldName, fieldValue);
+        }
     }
 }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/provider/ExternalWriterProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/provider/ExternalWriterProvider.java
index b8583d0..1caea58 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/provider/ExternalWriterProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/provider/ExternalWriterProvider.java
@@ -22,25 +22,50 @@
 import java.util.Map;
 import java.util.zip.Deflater;
 
+import org.apache.asterix.cloud.parquet.ParquetSinkExternalWriterFactory;
 import org.apache.asterix.cloud.writer.GCSExternalFileWriterFactory;
 import org.apache.asterix.cloud.writer.S3ExternalFileWriterFactory;
 import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.external.writer.HDFSExternalFileWriterFactory;
 import org.apache.asterix.external.writer.LocalFSExternalFileWriterFactory;
 import org.apache.asterix.external.writer.compressor.GzipExternalFileCompressStreamFactory;
 import org.apache.asterix.external.writer.compressor.IExternalFileCompressStreamFactory;
 import org.apache.asterix.external.writer.compressor.NoOpExternalFileCompressStreamFactory;
+import org.apache.asterix.external.writer.printer.CsvExternalFilePrinterFactory;
+import org.apache.asterix.external.writer.printer.ParquetExternalFilePrinterFactory;
+import org.apache.asterix.external.writer.printer.ParquetExternalFilePrinterFactoryProvider;
 import org.apache.asterix.external.writer.printer.TextualExternalFilePrinterFactory;
+import org.apache.asterix.formats.nontagged.CSVPrinterFactoryProvider;
 import org.apache.asterix.formats.nontagged.CleanJSONPrinterFactoryProvider;
+import org.apache.asterix.metadata.declared.IExternalWriteDataSink;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.asterix.om.utils.ConstantExpressionUtil;
 import org.apache.asterix.runtime.writer.ExternalFileWriterConfiguration;
+import org.apache.asterix.runtime.writer.ExternalFileWriterFactory;
 import org.apache.asterix.runtime.writer.IExternalFileWriterFactory;
 import org.apache.asterix.runtime.writer.IExternalFileWriterFactoryProvider;
 import org.apache.asterix.runtime.writer.IExternalPrinterFactory;
+import org.apache.asterix.runtime.writer.PathResolverFactory;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 import org.apache.hyracks.algebricks.core.algebra.metadata.IWriteDataSink;
 import org.apache.hyracks.algebricks.data.IPrinterFactory;
+import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.algebricks.runtime.operators.writer.SinkExternalWriterRuntimeFactory;
+import org.apache.hyracks.algebricks.runtime.operators.writer.WriterPartitionerFactory;
+import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.exceptions.SourceLocation;
 import org.apache.hyracks.control.cc.ClusterControllerService;
+import org.apache.hyracks.util.StorageUtil;
+import org.apache.parquet.column.ParquetProperties;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
 
 public class ExternalWriterProvider {
     private static final Map<String, IExternalFileWriterFactoryProvider> CREATOR_MAP;
@@ -53,9 +78,10 @@
         addCreator(ExternalDataConstants.KEY_ADAPTER_NAME_LOCALFS, LocalFSExternalFileWriterFactory.PROVIDER);
         addCreator(ExternalDataConstants.KEY_ADAPTER_NAME_AWS_S3, S3ExternalFileWriterFactory.PROVIDER);
         addCreator(ExternalDataConstants.KEY_ADAPTER_NAME_GCS, GCSExternalFileWriterFactory.PROVIDER);
+        addCreator(ExternalDataConstants.KEY_ADAPTER_NAME_HDFS, HDFSExternalFileWriterFactory.PROVIDER);
     }
 
-    public static IExternalFileWriterFactory createWriterFactory(ICcApplicationContext appCtx, IWriteDataSink sink,
+    private static IExternalFileWriterFactory createWriterFactory(ICcApplicationContext appCtx, IWriteDataSink sink,
             String staticPath, SourceLocation pathExpressionLocation) {
         String adapterName = sink.getAdapterName().toLowerCase();
         IExternalFileWriterFactoryProvider creator = CREATOR_MAP.get(adapterName);
@@ -67,14 +93,14 @@
         return creator.create(createConfiguration(appCtx, sink, staticPath, pathExpressionLocation));
     }
 
-    public static String getFileExtension(IWriteDataSink sink) {
+    private static String getFileExtension(IWriteDataSink sink) {
         Map<String, String> configuration = sink.getConfiguration();
         String format = getFormat(configuration);
         String compression = getCompression(configuration);
         return format + (compression.isEmpty() ? "" : "." + compression);
     }
 
-    public static int getMaxResult(IWriteDataSink sink) {
+    private static int getMaxResult(IWriteDataSink sink) {
         String maxResultString = sink.getConfiguration().get(ExternalDataConstants.KEY_WRITER_MAX_RESULT);
         if (maxResultString == null) {
             return ExternalDataConstants.WRITER_MAX_RESULT_DEFAULT;
@@ -82,6 +108,14 @@
         return Integer.parseInt(maxResultString);
     }
 
+    private static int getMaxParquetSchema(Map<String, String> conf) {
+        String maxSchemasString = conf.get(ExternalDataConstants.PARQUET_MAX_SCHEMAS_KEY);
+        if (maxSchemasString == null) {
+            return ExternalDataConstants.PARQUET_MAX_SCHEMAS_DEFAULT_VALUE;
+        }
+        return Integer.parseInt(maxSchemasString);
+    }
+
     private static ExternalFileWriterConfiguration createConfiguration(ICcApplicationContext appCtx,
             IWriteDataSink sink, String staticPath, SourceLocation pathExpressionLocation) {
         Map<String, String> params = sink.getConfiguration();
@@ -106,21 +140,127 @@
         CREATOR_MAP.put(adapterName.toLowerCase(), creator);
     }
 
-    public static IExternalPrinterFactory createPrinter(ICcApplicationContext appCtx, IWriteDataSink sink,
-            Object sourceType) {
+    public static IPushRuntimeFactory getWriteFileRuntime(ICcApplicationContext appCtx, IWriteDataSink sink,
+            Object sourceType, ILogicalExpression staticPathExpr, SourceLocation pathSourceLocation,
+            IScalarEvaluatorFactory dynamicPathEvalFactory, RecordDescriptor inputDesc, int sourceColumn,
+            int[] partitionColumns, IBinaryComparatorFactory[] partitionComparatorFactories)
+            throws AlgebricksException {
+        String staticPath = staticPathExpr != null ? ConstantExpressionUtil.getStringConstant(staticPathExpr) : null;
+        IExternalFileWriterFactory fileWriterFactory =
+                ExternalWriterProvider.createWriterFactory(appCtx, sink, staticPath, pathSourceLocation);
+        fileWriterFactory.validate();
+        String fileExtension = ExternalWriterProvider.getFileExtension(sink);
+        int maxResult = ExternalWriterProvider.getMaxResult(sink);
+
         Map<String, String> configuration = sink.getConfiguration();
         String format = configuration.get(ExternalDataConstants.KEY_FORMAT);
 
-        // Only JSON is supported for now
-        if (!ExternalDataConstants.FORMAT_JSON_LOWER_CASE.equalsIgnoreCase(format)) {
+        // Check for supported formats (guard against a missing format setting first)
+        if (format == null || !ExternalDataConstants.WRITER_SUPPORTED_FORMATS.contains(format.toLowerCase())) {
             throw new UnsupportedOperationException("Unsupported format " + format);
         }
 
         String compression = getCompression(configuration);
-        IExternalFileCompressStreamFactory compressStreamFactory =
-                createCompressionStreamFactory(appCtx, compression, configuration);
-        IPrinterFactory printerFactory = CleanJSONPrinterFactoryProvider.INSTANCE.getPrinterFactory(sourceType);
-        return new TextualExternalFilePrinterFactory(printerFactory, compressStreamFactory);
+        WriterPartitionerFactory partitionerFactory =
+                new WriterPartitionerFactory(partitionColumns, partitionComparatorFactories);
+        PathResolverFactory pathResolverFactory = new PathResolverFactory(fileWriterFactory, fileExtension,
+                dynamicPathEvalFactory, staticPath, pathSourceLocation);
+        IPrinterFactory printerFactory;
+        IExternalFileCompressStreamFactory compressStreamFactory;
+        IExternalPrinterFactory externalPrinterFactory;
+        ExternalFileWriterFactory writerFactory;
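+        // Dispatch on format: JSON and CSV go through the generic sink writer with a
+        // textual printer; Parquet either honors a user-provided schema or infers
+        // schemas at runtime, bounded by the max-schemas setting.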
+        switch (format) {
+            case ExternalDataConstants.FORMAT_JSON_LOWER_CASE:
+                compressStreamFactory = createCompressionStreamFactory(appCtx, compression, configuration);
+                printerFactory = CleanJSONPrinterFactoryProvider.INSTANCE.getPrinterFactory(sourceType);
+                externalPrinterFactory = new TextualExternalFilePrinterFactory(printerFactory, compressStreamFactory);
+                writerFactory = new ExternalFileWriterFactory(fileWriterFactory, externalPrinterFactory,
+                        pathResolverFactory, maxResult);
+
+                return new SinkExternalWriterRuntimeFactory(sourceColumn, partitionColumns,
+                        partitionComparatorFactories, inputDesc, writerFactory);
+            case ExternalDataConstants.FORMAT_CSV_LOWER_CASE:
+                compressStreamFactory = createCompressionStreamFactory(appCtx, compression, configuration);
+                if (!(sink instanceof IExternalWriteDataSink)
+                        || ((IExternalWriteDataSink) sink).getItemType() == null) {
+                    throw new CompilationException(ErrorCode.INVALID_CSV_SCHEMA);
+                }
+                IExternalWriteDataSink csvSink = (IExternalWriteDataSink) sink;
+                printerFactory = CSVPrinterFactoryProvider
+                        .createInstance(csvSink.getItemType(), sink.getConfiguration(), csvSink.getSourceLoc())
+                        .getPrinterFactory(sourceType);
+                externalPrinterFactory = new CsvExternalFilePrinterFactory(printerFactory, compressStreamFactory);
+                writerFactory = new ExternalFileWriterFactory(fileWriterFactory, externalPrinterFactory,
+                        pathResolverFactory, maxResult);
+                return new SinkExternalWriterRuntimeFactory(sourceColumn, partitionColumns,
+                        partitionComparatorFactories, inputDesc, writerFactory);
+
+            case ExternalDataConstants.FORMAT_PARQUET:
+                CompressionCodecName compressionCodecName;
+                if (compression == null || compression.isEmpty() || compression.equals("none")) {
+                    compressionCodecName = CompressionCodecName.UNCOMPRESSED;
+                } else {
+                    compressionCodecName = CompressionCodecName.valueOf(compression.toUpperCase());
+                }
+
+                String rowGroupSizeString = getRowGroupSize(configuration);
+                String pageSizeString = getPageSize(configuration);
+
+                long rowGroupSize = StorageUtil.getByteValue(rowGroupSizeString);
+                int pageSize = (int) StorageUtil.getByteValue(pageSizeString);
+                ParquetProperties.WriterVersion writerVersion = getParquetWriterVersion(configuration);
+
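+                // A user-provided schema selects the fixed-schema Parquet printer;
+                // otherwise schemas are inferred at runtime below.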
+                String parquetSchemaString = configuration.get(ExternalDataConstants.PARQUET_SCHEMA_KEY);
+                if (parquetSchemaString != null) {
+                    ParquetExternalFilePrinterFactory parquetPrinterFactory =
+                            new ParquetExternalFilePrinterFactory(compressionCodecName, parquetSchemaString,
+                                    (IAType) sourceType, rowGroupSize, pageSize, writerVersion);
+
+                    ExternalFileWriterFactory parquetWriterFactory = new ExternalFileWriterFactory(fileWriterFactory,
+                            parquetPrinterFactory, pathResolverFactory, maxResult);
+                    return new SinkExternalWriterRuntimeFactory(sourceColumn, partitionColumns,
+                            partitionComparatorFactories, inputDesc, parquetWriterFactory);
+                }
+
+                int maxSchemas = ExternalWriterProvider.getMaxParquetSchema(configuration);
+                ParquetExternalFilePrinterFactoryProvider printerFactoryProvider =
+                        new ParquetExternalFilePrinterFactoryProvider(compressionCodecName, (IAType) sourceType,
+                                rowGroupSize, pageSize, writerVersion);
+                return new ParquetSinkExternalWriterFactory(partitionerFactory, inputDesc, sourceColumn,
+                        (IAType) sourceType, maxSchemas, fileWriterFactory, maxResult, printerFactoryProvider,
+                        pathResolverFactory);
+
+            default:
+                throw new UnsupportedOperationException("Unsupported format " + format);
+        }
+
+    }
+
+    private static ParquetProperties.WriterVersion getParquetWriterVersion(Map<String, String> configuration) {
+        String writerVersionString = configuration.getOrDefault(ExternalDataConstants.PARQUET_WRITER_VERSION_KEY,
+                ExternalDataConstants.PARQUET_WRITER_VERSION_VALUE_1);
+        if (writerVersionString.equals(ExternalDataConstants.PARQUET_WRITER_VERSION_VALUE_2)) {
+            return ParquetProperties.WriterVersion.PARQUET_2_0;
+        }
+        return ParquetProperties.WriterVersion.PARQUET_1_0;
+    }
+
+    private static String getRowGroupSize(Map<String, String> configuration) {
+        return configuration.getOrDefault(ExternalDataConstants.KEY_PARQUET_ROW_GROUP_SIZE,
+                ExternalDataConstants.PARQUET_DEFAULT_ROW_GROUP_SIZE);
+    }
+
+    private static String getPageSize(Map<String, String> configuration) {
+        return configuration.getOrDefault(ExternalDataConstants.KEY_PARQUET_PAGE_SIZE,
+                ExternalDataConstants.PARQUET_DEFAULT_PAGE_SIZE);
     }
 
     private static String getFormat(Map<String, String> configuration) {
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/Creator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/Creator.java
new file mode 100644
index 0000000..49fca80f
--- /dev/null
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/Creator.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.metadata.utils;
+
+import java.io.Serializable;
+
+import org.apache.asterix.common.metadata.MetadataConstants;
+import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
+import org.apache.asterix.om.base.ARecord;
+import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.types.ARecordType;
+
+public class Creator implements Serializable {
+    private static final long serialVersionUID = 1L;
+
+    public static final Creator DEFAULT_CREATOR =
+            new Creator(MetadataConstants.DEFAULT_CREATOR, MetadataConstants.DEFAULT_CREATOR_UUID);
+    private final String name;
+    private final String uuid;
+
+    private Creator(String name, String uuid) {
+        this.name = name;
+        this.uuid = uuid;
+    }
+
+    public static Creator create(String name, String uuid) {
+        return new Creator(name, uuid);
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public String getUuid() {
+        return uuid;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("Creator{name='%s', uuid='%s'}", name, uuid);
+    }
+
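+    /**
+     * Reads the optional open "Creator" field from a metadata record, returning its
+     * Name/Uuid values, or {@link #DEFAULT_CREATOR} when the field is absent.
+     */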
+    public static Creator createOrDefault(ARecord record) {
+        ARecordType recType = record.getType();
+        int creatorIndex = recType.getFieldIndex(MetadataRecordTypes.CREATOR_ARECORD_FIELD_NAME);
+        String name = null, uuid = null;
+
+        if (creatorIndex >= 0) {
+            ARecordType creatorType = (ARecordType) recType.getFieldTypes()[creatorIndex];
+            ARecord creatorRecord = (ARecord) record.getValueByPos(creatorIndex);
+            int nameIndex = creatorType.getFieldIndex(MetadataRecordTypes.FIELD_NAME_CREATOR_NAME);
+            int uuidIndex = creatorType.getFieldIndex(MetadataRecordTypes.FIELD_NAME_CREATOR_UUID);
+
+            if (nameIndex >= 0) {
+                name = ((AString) creatorRecord.getValueByPos(nameIndex)).getStringValue();
+            }
+            if (uuidIndex >= 0) {
+                uuid = ((AString) creatorRecord.getValueByPos(uuidIndex)).getStringValue();
+            }
+            return create(name, uuid);
+        }
+        return DEFAULT_CREATOR;
+    }
+
+}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/IndexUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/IndexUtil.java
index 67e4606..dda2111 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/IndexUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/IndexUtil.java
@@ -94,7 +94,8 @@
     public static Index getPrimaryIndex(Dataset dataset) {
         InternalDatasetDetails id = (InternalDatasetDetails) dataset.getDatasetDetails();
         return Index.createPrimaryIndex(dataset.getDatabaseName(), dataset.getDataverseName(), dataset.getDatasetName(),
-                id.getPartitioningKey(), id.getKeySourceIndicator(), id.getPrimaryKeyType(), dataset.getPendingOp());
+                id.getPartitioningKey(), id.getKeySourceIndicator(), id.getPrimaryKeyType(), dataset.getPendingOp(),
+                dataset.getCreator());
     }
 
     public static int[] getBtreeFieldsIfFiltered(Dataset dataset, Index index) throws AlgebricksException {
diff --git a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java
index 2832470..788231c 100644
--- a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java
+++ b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java
@@ -32,6 +32,7 @@
 import org.apache.asterix.metadata.entities.InternalDatasetDetails;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails.FileStructure;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.runtime.compression.CompressionManager;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -64,7 +65,7 @@
             Dataset dataset = new Dataset(db, dv, "log", itemTypeDb, itemTypeDv, "LogType", metaTypeDb, metaTypeDv,
                     "MetaType", "DEFAULT_NG_ALL_NODES", "prefix", compactionPolicyProperties, details,
                     Collections.emptyMap(), DatasetType.INTERNAL, 115, 0, CompressionManager.NONE,
-                    DatasetFormatInfo.SYSTEM_DEFAULT);
+                    DatasetFormatInfo.SYSTEM_DEFAULT, Creator.DEFAULT_CREATOR);
             DatasetTupleTranslator dtTranslator = new DatasetTupleTranslator(true, DatasetEntity.of(false));
             ITupleReference tuple = dtTranslator.getTupleFromMetadataEntity(dataset);
             Dataset deserializedDataset = dtTranslator.getMetadataEntityFromTuple(tuple);
diff --git a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
index 00f188d..713518d 100644
--- a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
+++ b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
@@ -41,6 +41,7 @@
 import org.apache.asterix.metadata.entities.InternalDatasetDetails;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails.FileStructure;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy;
+import org.apache.asterix.metadata.utils.Creator;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.IAType;
@@ -76,12 +77,13 @@
             Dataset dataset = new Dataset(dvTestDatabase, dvTest, "d1", itemTypeDatabase, dvFoo, "LogType",
                     metaTypeDatabase, dvCB, "MetaType", "DEFAULT_NG_ALL_NODES", "prefix", compactionPolicyProperties,
                     details, Collections.emptyMap(), DatasetType.INTERNAL, 115, 0, CompressionManager.NONE,
-                    DatasetFormatInfo.SYSTEM_DEFAULT);
+                    DatasetFormatInfo.SYSTEM_DEFAULT, Creator.DEFAULT_CREATOR);
 
             Index index = new Index(dvTestDatabase, dvTest, "d1", "i1", IndexType.BTREE,
                     Collections.singletonList(Collections.singletonList("row_id")),
                     indicator == null ? null : Collections.singletonList(indicator),
-                    Collections.singletonList(BuiltinType.AINT64), false, false, false, 0, OptionalBoolean.of(false));
+                    Collections.singletonList(BuiltinType.AINT64), false, false, false, 0, OptionalBoolean.of(false),
+                    null);
 
             MetadataNode mockMetadataNode = mock(MetadataNode.class);
             when(mockMetadataNode.getDatatype(any(), anyString(), any(DataverseName.class), anyString()))
diff --git a/asterixdb/asterix-om/pom.xml b/asterixdb/asterix-om/pom.xml
index 6db4840..9eccb86 100644
--- a/asterixdb/asterix-om/pom.xml
+++ b/asterixdb/asterix-om/pom.xml
@@ -143,8 +143,8 @@
       <artifactId>guava</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.esri.geometry</groupId>
-      <artifactId>esri-geometry-api</artifactId>
+      <groupId>org.locationtech.jts</groupId>
+      <artifactId>jts-core</artifactId>
     </dependency>
     <dependency>
       <groupId>javax.xml.bind</groupId>
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/common/TypeResolverUtil.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/common/TypeResolverUtil.java
index 9d52a85..6d9007f 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/common/TypeResolverUtil.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/common/TypeResolverUtil.java
@@ -176,9 +176,9 @@
         IAType[] fieldTypesArray = fieldTypes.toArray(new IAType[0]);
         ARecordType resultType;
         if (resultTypeIsOpen && knowsAdditonalFieldNames) {
-            resultType = new ARecordType("generalized-record-type", fieldNamesArray, fieldTypesArray, resultTypeIsOpen,
-                    allPossibleAdditionalFieldNames);
             LinkedHashSet<String> resultFieldOrder = generalizeRecordFieldOrderHint(leftType, rightType);
+            resultType = new ARecordType("generalized-record-type", fieldNamesArray, fieldTypesArray, resultTypeIsOpen,
+                    allPossibleAdditionalFieldNames, resultFieldOrder);
             if (resultFieldOrder != null) {
                 resultType.getAnnotations().add(new RecordFieldOrderAnnotation(resultFieldOrder));
             }
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AGeometryPartialBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AGeometryPartialBinaryComparatorFactory.java
index 57f9898..fa37722 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AGeometryPartialBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AGeometryPartialBinaryComparatorFactory.java
@@ -51,7 +51,7 @@
         if (c == 0) {
             AGeometry geometry = AGeometrySerializerDeserializer.getAGeometryObject(b1, s1);
             c = (geometry.getGeometry()
-                    .Equals(AGeometrySerializerDeserializer.getAGeometryObject(b2, s2).getGeometry())) ? 0 : 1;
+                    .equals(AGeometrySerializerDeserializer.getAGeometryObject(b2, s2).getGeometry())) ? 0 : 1;
         }
         return c;
     }
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/PrintTools.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/PrintTools.java
index 449ce13..372b5fd 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/PrintTools.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/PrintTools.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.dataflow.data.nontagged.printers;
 
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.DEFAULT_QUOTE;
+
 import java.io.DataOutput;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -25,15 +27,21 @@
 import java.nio.charset.StandardCharsets;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils;
 import org.apache.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
 import org.apache.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
 import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
 import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.JtsModule;
 import org.apache.asterix.om.base.temporal.GregorianCalendarSystem;
 import org.apache.hyracks.algebricks.data.utils.WriteValueTools;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.util.bytes.HexPrinter;
 import org.apache.hyracks.util.string.UTF8StringUtil;
+import org.locationtech.jts.geom.Geometry;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 public class PrintTools {
 
@@ -275,29 +283,55 @@
         UPPER_CASE,
     }
 
-    public static void writeUTF8StringAsCSV(byte[] b, int s, int l, OutputStream os) throws IOException {
+    public static void writeUTF8StringAsCSV(byte[] b, int s, int l, PrintStream ps, char quote, boolean forceQuote,
+            char escape, char delimiter) throws IOException {
         int stringLength = UTF8StringUtil.getUTFLength(b, s);
         int position = s + UTF8StringUtil.getNumBytesToStoreLength(stringLength);
         int maxPosition = position + stringLength;
-        os.write('"');
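+        // Treat NULL_CHAR as "no quote configured" and fall back to the default quote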
+        char quoteChar = quote == CSVUtils.NULL_CHAR ? DEFAULT_QUOTE : quote;
+
+        boolean shouldQuote = forceQuote;
+        if (!shouldQuote) {
+            // Check if the string contains any special characters that require quoting
+            for (int i = position; i < maxPosition; i++) {
+                char c = UTF8StringUtil.charAt(b, i);
+                if (c == quote || c == '\r' || c == '\n' || c == escape || c == delimiter) {
+                    shouldQuote = true;
+                    break;
+                }
+            }
+        }
+
+        if (shouldQuote) {
+            ps.print(quoteChar);
+        }
+
         while (position < maxPosition) {
             char c = UTF8StringUtil.charAt(b, position);
             int sz = UTF8StringUtil.charSize(b, position);
-            if (c == '"') {
-                os.write('"');
+
+            // TODO: escape handling -- values containing a carriage return are always
+            // quoted, so does '\r' itself still need to be escaped here?
+            if (c == quote || c == '\r') {
+                ps.print(escape);
             }
+
+            // Handling surrogate pairs
             if (Character.isHighSurrogate(c)) {
-                position += writeSupplementaryChar(os, b, maxPosition, position, c, sz);
+                position += writeSupplementaryChar(ps, b, maxPosition, position, c, sz);
                 continue;
             }
+
-            while (sz > 0) {
-                os.write(b[position]);
-                ++position;
-                --sz;
-            }
-            break;
+            // Write the full character once, then advance past all of its encoded
+            // bytes (printing once per byte would duplicate multi-byte characters)
+            ps.print(c);
+            position += sz;
         }
-        os.write('"');
+
+        if (shouldQuote) {
+            ps.print(quoteChar);
+        }
     }
 
     public static void writeUTF8StringRaw(byte[] b, int s, int l, DataOutput os) throws IOException {
@@ -461,4 +495,23 @@
         return highSurrogateSize + lowSurrogateSize;
     }
 
+    /**
+     * Converts a JTS Geometry to a GeoJSON string.
+     *
+     * @param geometry The JTS Geometry to be converted.
+     * @return A GeoJSON string representation of the Geometry, or an empty string if
+     *         the geometry is null or serialization fails.
+     */
+    public static String geometryToGeoJSON(Geometry geometry) {
+        if (geometry == null) {
+            return "";
+        }
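+        // Note: builds a fresh ObjectMapper per call; hot paths may want to reuse one.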
+        ObjectMapper mapper = new ObjectMapper();
+        mapper.registerModule(new JtsModule());
+        try {
+            return mapper.writeValueAsString(geometry);
+        } catch (JsonProcessingException e) {
+            return "";
+        }
+    }
+
 }
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ANullPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ANullPrinterFactory.java
index dcf98a0..3d3ca3e 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ANullPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ANullPrinterFactory.java
@@ -19,16 +19,34 @@
 package org.apache.asterix.dataflow.data.nontagged.printers.csv;
 
 import java.io.PrintStream;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hyracks.algebricks.data.IPrinter;
 import org.apache.hyracks.algebricks.data.IPrinterFactory;
 
 public class ANullPrinterFactory implements IPrinterFactory {
-
     private static final long serialVersionUID = 1L;
-    public static final ANullPrinterFactory INSTANCE = new ANullPrinterFactory();
+    private static final String DEFAULT_NULL_STRING = "";
+    // Cache factory instances keyed by their configured null string
+    private static final ConcurrentHashMap<String, ANullPrinterFactory> instanceCache = new ConcurrentHashMap<>();
+    private final String nullString;
 
-    public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> ps.print("null");
+    private ANullPrinterFactory(String nullString) {
+        this.nullString = nullString;
+    }
+
+    public static ANullPrinterFactory createInstance(String nullString) {
+        String key = CSVUtils.generateKey(nullString);
+        return instanceCache.computeIfAbsent(key, k -> new ANullPrinterFactory(nullString));
+    }
+
+    private final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> {
+        if (nullString != null) {
+            ps.print(nullString);
+        } else {
+            ps.print(DEFAULT_NULL_STRING);
+        }
+    };
 
     @Override
     public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AObjectPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AObjectPrinterFactory.java
index f1e2300..20b1ef0 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AObjectPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AObjectPrinterFactory.java
@@ -18,12 +18,22 @@
  */
 package org.apache.asterix.dataflow.data.nontagged.printers.csv;
 
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_DELIMITER;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_EMPTY_FIELD_AS_NULL;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_ESCAPE;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_FORCE_QUOTE;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_NULL;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_QUOTE;
+
 import java.io.PrintStream;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.asterix.om.pointables.ARecordVisitablePointable;
 import org.apache.asterix.om.pointables.base.DefaultOpenFieldType;
 import org.apache.asterix.om.pointables.printer.IPrintVisitor;
 import org.apache.asterix.om.pointables.printer.csv.APrintVisitor;
+import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.EnumDeserializer;
 import org.apache.hyracks.algebricks.common.utils.Pair;
@@ -32,11 +42,26 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class AObjectPrinterFactory implements IPrinterFactory {
-
     private static final long serialVersionUID = 1L;
-    public static final AObjectPrinterFactory INSTANCE = new AObjectPrinterFactory();
+    private static final ConcurrentHashMap<String, AObjectPrinterFactory> instanceCache = new ConcurrentHashMap<>();
+    private final ARecordType itemType;
+    private final Map<String, String> configuration;
+    private final boolean emptyFieldAsNull;
 
-    public static boolean printFlatValue(ATypeTag typeTag, byte[] b, int s, int l, PrintStream ps)
+    private AObjectPrinterFactory(ARecordType itemType, Map<String, String> configuration) {
+        this.itemType = itemType;
+        this.configuration = configuration;
+        String emptyFieldAsNullStr = configuration.get(KEY_EMPTY_FIELD_AS_NULL);
+        this.emptyFieldAsNull = emptyFieldAsNullStr != null && Boolean.parseBoolean(emptyFieldAsNullStr);
+    }
+
+    public static AObjectPrinterFactory createInstance(ARecordType itemType, Map<String, String> configuration) {
+        // Generate a unique key from the parameters and cache the instance built for it.
+        String key = CSVUtils.generateKey(itemType, configuration);
+        return instanceCache.computeIfAbsent(key, k -> new AObjectPrinterFactory(itemType, configuration));
+    }
+
+    public boolean printFlatValue(ATypeTag typeTag, byte[] b, int s, int l, PrintStream ps)
             throws HyracksDataException {
         switch (typeTag) {
             case TINYINT:
@@ -53,7 +78,7 @@
                 return true;
             case MISSING:
             case NULL:
-                ANullPrinterFactory.PRINTER.print(b, s, l, ps);
+                ANullPrinterFactory.createInstance(configuration.get(KEY_NULL)).createPrinter().print(b, s, l, ps);
                 return true;
             case BOOLEAN:
                 ABooleanPrinterFactory.PRINTER.print(b, s, l, ps);
@@ -104,7 +129,14 @@
                 ARectanglePrinterFactory.PRINTER.print(b, s, l, ps);
                 return true;
             case STRING:
-                AStringPrinterFactory.PRINTER.print(b, s, l, ps);
+                if (emptyFieldAsNull && CSVUtils.isEmptyString(b, s, l)) {
+                    ANullPrinterFactory.createInstance(configuration.get(KEY_NULL)).createPrinter().print(b, s, l, ps);
+                } else {
+                    AStringPrinterFactory
+                            .createInstance(configuration.get(KEY_QUOTE), configuration.get(KEY_FORCE_QUOTE),
+                                    configuration.get(KEY_ESCAPE), configuration.get(KEY_DELIMITER))
+                            .createPrinter().print(b, s, l, ps);
+                }
                 return true;
             case BINARY:
                 ABinaryHexPrinterFactory.PRINTER.print(b, s, l, ps);
@@ -119,11 +151,10 @@
 
     @Override
     public IPrinter createPrinter() {
-        final ARecordVisitablePointable rPointable =
+        final ARecordVisitablePointable recordVisitablePointable =
                 new ARecordVisitablePointable(DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
         final Pair<PrintStream, ATypeTag> streamTag = new Pair<>(null, null);
-
-        final IPrintVisitor visitor = new APrintVisitor();
+        final IPrintVisitor visitor = new APrintVisitor(itemType, configuration);
 
         return (byte[] b, int s, int l, PrintStream ps) -> {
             ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(b[s]);
@@ -132,8 +163,8 @@
                 streamTag.second = typeTag;
                 switch (typeTag) {
                     case OBJECT:
-                        rPointable.set(b, s, l);
-                        visitor.visit(rPointable, streamTag);
+                        recordVisitablePointable.set(b, s, l);
+                        visitor.visit(recordVisitablePointable, streamTag);
                         break;
                     default:
                         throw new HyracksDataException("No printer for type " + typeTag);
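
A minimal usage sketch of the new cached-factory API (hypothetical driver class; a real pipeline would pass the actual record type rather than null):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hyracks.algebricks.data.IPrinter;

    public class CsvObjectPrinterSketch {
        public static void main(String[] args) {
            Map<String, String> config = new HashMap<>();
            config.put(CSVUtils.KEY_QUOTE, "\"");
            config.put(CSVUtils.KEY_DELIMITER, ",");
            config.put(CSVUtils.KEY_EMPTY_FIELD_AS_NULL, "true");

            // Identical (itemType, configuration) pairs resolve to the same cached factory.
            AObjectPrinterFactory factory = AObjectPrinterFactory.createInstance(null, config);
            IPrinter printer = factory.createPrinter();
            // printer.print(bytes, start, length, ps) would then render an OBJECT-tagged record as CSV.
        }
    }
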
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ARecordPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ARecordPrinterFactory.java
index 909fd60..2b31fb0 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ARecordPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ARecordPrinterFactory.java
@@ -19,6 +19,7 @@
 package org.apache.asterix.dataflow.data.nontagged.printers.csv;
 
 import java.io.PrintStream;
+import java.util.Map;
 
 import org.apache.asterix.om.pointables.PointableAllocator;
 import org.apache.asterix.om.pointables.base.DefaultOpenFieldType;
@@ -36,9 +37,13 @@
 
     private static final long serialVersionUID = 1L;
     private final ARecordType recType;
+    private final ARecordType itemType;
+    private final Map<String, String> configuration;
 
-    public ARecordPrinterFactory(ARecordType recType) {
+    public ARecordPrinterFactory(ARecordType recType, ARecordType itemType, Map<String, String> configuration) {
         this.recType = recType;
+        this.itemType = itemType;
+        this.configuration = configuration;
     }
 
     @Override
@@ -47,7 +52,7 @@
         final IAType inputType =
                 recType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.OBJECT) : recType;
         final IVisitablePointable recAccessor = allocator.allocateRecordValue(inputType);
-        final APrintVisitor printVisitor = new APrintVisitor();
+        final APrintVisitor printVisitor = new APrintVisitor(itemType, configuration);
         final Pair<PrintStream, ATypeTag> arg = new Pair<>(null, null);
 
         return new IPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AStringPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AStringPrinterFactory.java
index c217203..ae368bd 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AStringPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AStringPrinterFactory.java
@@ -18,8 +18,14 @@
  */
 package org.apache.asterix.dataflow.data.nontagged.printers.csv;
 
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.DEFAULT_VALUES;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_DELIMITER;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_ESCAPE;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_QUOTE;
+
 import java.io.IOException;
 import java.io.PrintStream;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.asterix.dataflow.data.nontagged.printers.PrintTools;
 import org.apache.hyracks.algebricks.data.IPrinter;
@@ -27,18 +33,52 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class AStringPrinterFactory implements IPrinterFactory {
-
     private static final long serialVersionUID = 1L;
-    public static final AStringPrinterFactory INSTANCE = new AStringPrinterFactory();
+    private static final ConcurrentHashMap<String, AStringPrinterFactory> instanceCache = new ConcurrentHashMap<>();
+    private static final String NONE = "none";
+    private String quote;
+    private Boolean forceQuote;
+    private String escape;
+    private String delimiter;
 
-    public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> {
+    private AStringPrinterFactory(String quote, Boolean forceQuote, String escape, String delimiter) {
+        this.quote = quote;
+        this.forceQuote = forceQuote;
+        this.escape = escape;
+        this.delimiter = delimiter;
+    }
+
+    public static AStringPrinterFactory createInstance(String quote, String forceQuoteStr, String escape,
+            String delimiter) {
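+        // Note: when forceQuote is unspecified, string values are force-quoted by default.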
+        boolean forceQuote = forceQuoteStr == null || Boolean.parseBoolean(forceQuoteStr);
+        String key = CSVUtils.generateKey(quote, forceQuoteStr, escape, delimiter);
+        return instanceCache.computeIfAbsent(key, k -> new AStringPrinterFactory(quote, forceQuote, escape, delimiter));
+    }
+
+    private final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> {
         try {
-            PrintTools.writeUTF8StringAsCSV(b, s + 1, l - 1, ps);
+            char quoteChar =
+                    quote == null ? extractSingleChar(DEFAULT_VALUES.get(KEY_QUOTE)) : extractSingleChar(quote);
+            char escapeChar =
+                    escape == null ? extractSingleChar(DEFAULT_VALUES.get(KEY_ESCAPE)) : extractSingleChar(escape);
+            char delimiterChar = delimiter == null ? extractSingleChar(DEFAULT_VALUES.get(KEY_DELIMITER))
+                    : extractSingleChar(delimiter);
+            PrintTools.writeUTF8StringAsCSV(b, s + 1, l - 1, ps, quoteChar, forceQuote, escapeChar, delimiterChar);
         } catch (IOException e) {
             throw HyracksDataException.create(e);
         }
     };
 
+    public char extractSingleChar(String input) throws IOException {
+        if (input != null && input.length() == 1) {
+            return input.charAt(0);
+        } else if (input != null && input.equalsIgnoreCase(NONE)) {
+            return CSVUtils.NULL_CHAR; // 'none' maps to the null character, disabling that CSV control character
+        } else {
+            throw new IOException("Input string must be a single character or 'none'");
+        }
+    }
+
     @Override
     public IPrinter createPrinter() {
         return PRINTER;
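
A rough sketch of the parameterized string printer (hypothetical snippet, assuming these classes are on the classpath):

    import java.io.IOException;

    public class StringPrinterSketch {
        public static void main(String[] args) throws IOException {
            // Identical parameters resolve to the same cache entry.
            AStringPrinterFactory f1 = AStringPrinterFactory.createInstance("\"", "true", "\\", ",");
            AStringPrinterFactory f2 = AStringPrinterFactory.createInstance("\"", "true", "\\", ",");
            System.out.println(f1 == f2); // true

            // "none" maps to CSVUtils.NULL_CHAR, disabling the corresponding control character.
            System.out.println((int) f1.extractSingleChar("none")); // 0
        }
    }
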
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/CSVUtils.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/CSVUtils.java
new file mode 100644
index 0000000..b50816a
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/CSVUtils.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.dataflow.data.nontagged.printers.csv;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.asterix.om.types.ARecordType;
+
+public class CSVUtils {
+
+    // Constants for the supported CSV parameters
+    public static final String KEY_NULL = "null";
+    public static final String KEY_ESCAPE = "escape";
+    public static final String KEY_HEADER = "header";
+    public static final String KEY_DELIMITER = "delimiter";
+    public static final String KEY_RECORD_DELIMITER = "recordDelimiter";
+    public static final String KEY_FORCE_QUOTE = "forceQuote";
+    public static final String KEY_QUOTE = "quote";
+    public static final String KEY_EMPTY_FIELD_AS_NULL = "empty_field_as_null";
+    public static final char DEFAULT_QUOTE = '"';
+    private static final String DEFAULT_DELIMITER_VALUE = ",";
+    private static final String DEFAULT_NULL_VALUE = "";
+    private static final String DOUBLE_QUOTES = "\"";
+    public static final char NULL_CHAR = '\0';
+    private static final String FALSE = "false";
+    private static final String DEFAULT_RECORD_DELIMITER = "\n";
+
+    // List of supported parameters
+    public static final List<String> CSV_PARAMETERS = Arrays.asList(KEY_NULL, KEY_ESCAPE, KEY_HEADER, KEY_DELIMITER,
+            KEY_RECORD_DELIMITER, KEY_FORCE_QUOTE, KEY_QUOTE, KEY_EMPTY_FIELD_AS_NULL);
+
+    // Default values for each parameter
+    public static final Map<String, String> DEFAULT_VALUES;
+
+    static {
+        DEFAULT_VALUES = new HashMap<>();
+        DEFAULT_VALUES.put(KEY_NULL, DEFAULT_NULL_VALUE);
+        DEFAULT_VALUES.put(KEY_ESCAPE, DOUBLE_QUOTES);
+        DEFAULT_VALUES.put(KEY_HEADER, FALSE);
+        DEFAULT_VALUES.put(KEY_DELIMITER, DEFAULT_DELIMITER_VALUE);
+        DEFAULT_VALUES.put(KEY_RECORD_DELIMITER, DEFAULT_RECORD_DELIMITER);
+        DEFAULT_VALUES.put(KEY_FORCE_QUOTE, FALSE);
+        DEFAULT_VALUES.put(KEY_QUOTE, DOUBLE_QUOTES);
+        DEFAULT_VALUES.put(KEY_EMPTY_FIELD_AS_NULL, FALSE);
+    }
+
+    // Generate a key based on configuration for ARecordType and parameters
+    public static String generateKey(ARecordType itemType, Map<String, String> configuration) {
+        StringBuilder keyBuilder = new StringBuilder();
+        keyBuilder.append(itemType == null ? KEY_NULL : itemType.toString()).append(" | ");
+        // Iterate through supported CSV parameters and append their values from configuration
+        for (String param : CSV_PARAMETERS) {
+            String value = configuration.getOrDefault(param, DEFAULT_VALUES.get(param));
+            keyBuilder.append(param).append(" : ").append(value).append(" | ");
+        }
+        // Remove the trailing " | "
+        if (keyBuilder.length() > 0 && keyBuilder.charAt(keyBuilder.length() - 2) == '|') {
+            keyBuilder.setLength(keyBuilder.length() - 3);
+        }
+        return keyBuilder.toString();
+    }
+
+    public static String generateKey(String quote, String forceQuoteStr, String escape, String delimiter) {
+        // Use default values when no values are specified (null)
+        return KEY_QUOTE + " : " + (quote != null ? quote : DEFAULT_VALUES.get(KEY_QUOTE)) + " | " + KEY_FORCE_QUOTE
+                + " : " + (forceQuoteStr != null ? forceQuoteStr : DEFAULT_VALUES.get(KEY_FORCE_QUOTE)) + " | "
+                + KEY_ESCAPE + " : " + (escape != null ? escape : DEFAULT_VALUES.get(KEY_ESCAPE)) + " | "
+                + KEY_DELIMITER + " : " + (delimiter != null ? delimiter : DEFAULT_VALUES.get(KEY_DELIMITER));
+    }
+
+    public static String generateKey(String nullString) {
+        // Use the default value when nullString is not specified (null)
+        return KEY_NULL + " : " + (nullString != null ? nullString : DEFAULT_VALUES.get(KEY_NULL));
+    }
+
+    public static boolean isEmptyString(byte[] b, int s, int l) {
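+        // A value too short to carry any content bytes, or an out-of-range slice, is treated as empty.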
+        return b == null || l <= 2 || s < 0 || s + l > b.length;
+    }
+
+    public static String getDelimiter(Map<String, String> configuration) {
+        return configuration.get(KEY_DELIMITER) == null ? DEFAULT_DELIMITER_VALUE : configuration.get(KEY_DELIMITER);
+    }
+}
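
A small sketch of how the cache keys come out (hypothetical; the exact string shown is illustrative):

    import java.util.HashMap;
    import java.util.Map;

    public class CsvKeySketch {
        public static void main(String[] args) {
            Map<String, String> config = new HashMap<>();
            config.put(CSVUtils.KEY_DELIMITER, "|");
            config.put(CSVUtils.KEY_HEADER, "true");
            // Unspecified parameters fall back to DEFAULT_VALUES, so equivalent
            // configurations always produce the same key, along the lines of:
            //   null | null :  | escape : " | header : true | delimiter : | | ...
            System.out.println(CSVUtils.generateKey(null, config));
        }
    }
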
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AGeometryPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AGeometryPrinterFactory.java
index cc74f85..848e1bd 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AGeometryPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AGeometryPrinterFactory.java
@@ -23,11 +23,11 @@
 import java.io.DataInputStream;
 import java.io.PrintStream;
 
+import org.apache.asterix.dataflow.data.nontagged.printers.PrintTools;
 import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
 import org.apache.hyracks.algebricks.data.IPrinter;
 import org.apache.hyracks.algebricks.data.IPrinterFactory;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
 
 public class AGeometryPrinterFactory implements IPrinterFactory {
 
@@ -37,8 +37,8 @@
     public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> {
         ByteArrayInputStream inStream = new ByteArrayInputStream(b, s + 1, l - 1);
         DataInput dataIn = new DataInputStream(inStream);
-        OGCGeometry geometry = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
-        ps.print(geometry.asGeoJson());
+        Geometry geometry = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
+        ps.print(PrintTools.geometryToGeoJSON(geometry));
     };
 
     @Override
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/AGeometrySerializerDeserializer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/AGeometrySerializerDeserializer.java
index 38a4c46..28c171d 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/AGeometrySerializerDeserializer.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/AGeometrySerializerDeserializer.java
@@ -21,19 +21,18 @@
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.Arrays;
 
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoFunctionUtils;
 import org.apache.asterix.om.base.AGeometry;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.OperatorImportFromWkb;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.WkbImportFlags;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKBReader;
+import org.locationtech.jts.io.WKBWriter;
 
 public class AGeometrySerializerDeserializer implements ISerializerDeserializer<AGeometry> {
 
@@ -41,24 +40,19 @@
 
     public static final AGeometrySerializerDeserializer INSTANCE = new AGeometrySerializerDeserializer();
 
-    /**Use WGS 84 (EPSG:4326) as the default coordinate reference system*/
-    public static final SpatialReference DEFAULT_CRS = SpatialReference.create(4326);
-
     private AGeometrySerializerDeserializer() {
     }
 
     @Override
     public AGeometry deserialize(DataInput in) throws HyracksDataException {
+        WKBReader wkbReader = new WKBReader();
         try {
             int length = in.readInt();
             byte[] bytes = new byte[length];
             in.readFully(bytes);
-            ByteBuffer buffer = ByteBuffer.wrap(bytes);
-            OGCGeometry geometry = OGCGeometry.createFromOGCStructure(
-                    OperatorImportFromWkb.local().executeOGC(WkbImportFlags.wkbImportDefaults, buffer, null),
-                    DEFAULT_CRS);
+            Geometry geometry = wkbReader.read(bytes);
             return new AGeometry(geometry);
-        } catch (IOException e) {
+        } catch (IOException | ParseException e) {
             throw HyracksDataException.create(e);
         }
     }
@@ -66,8 +60,10 @@
     @Override
     public void serialize(AGeometry instance, DataOutput out) throws HyracksDataException {
         try {
-            OGCGeometry geometry = instance.getGeometry();
-            byte[] buffer = geometry.asBinary().array();
+            Geometry geometry = instance.getGeometry();
+            WKBWriter wkbWriter = new WKBWriter(GeoFunctionUtils.getCoordinateDimension(geometry),
+                    GeoFunctionUtils.LITTLE_ENDIAN_BYTEORDER);
+            byte[] buffer = wkbWriter.write(geometry);
             // For efficiency, we store the size of the geometry in bytes in the first 32 bits
             // This allows AsterixDB to skip over this attribute if needed.
             out.writeInt(buffer.length);
@@ -77,24 +73,40 @@
         }
     }
 
-    public static int getAGeometrySizeOffset() throws HyracksDataException {
+    public void serialize(Geometry geometry, DataOutput out) throws HyracksDataException {
+        try {
+            WKBWriter wkbWriter = new WKBWriter(GeoFunctionUtils.getCoordinateDimension(geometry),
+                    GeoFunctionUtils.LITTLE_ENDIAN_BYTEORDER);
+            byte[] buffer = wkbWriter.write(geometry);
+            // For efficiency, we store the size of the geometry in bytes in the first 32 bits
+            // This allows AsterixDB to skip over this attribute if needed.
+            out.writeInt(buffer.length);
+            out.write(buffer);
+        } catch (IOException e) {
+            throw HyracksDataException.create(e);
+        }
+    }
+
+    public static int getAGeometrySizeOffset() {
         return 0;
     }
 
     public static AGeometry getAGeometryObject(byte[] bytes, int startOffset) throws HyracksDataException {
         // Size of the AGeometry object is stored in bytes in the first 32 bits
         // See serialize method
+        WKBReader wkbReader = new WKBReader();
         int size = AInt32SerializerDeserializer.getInt(bytes, startOffset);
 
         if (bytes.length < startOffset + size + 4)
             // TODO(mmahin): this error code takes 5 parameters, and this is passing none, so I suspect this isn't right
             throw RuntimeDataException.create(ErrorCode.VALUE_OUT_OF_RANGE);
-
-        // Skip the size of the geometry in first 4 bytes
-        byte[] bytes1 = Arrays.copyOfRange(bytes, startOffset + 4, startOffset + size + 4);
-        ByteBuffer buffer = ByteBuffer.wrap(bytes1);
-        OGCGeometry geometry = OGCGeometry.createFromOGCStructure(
-                OperatorImportFromWkb.local().executeOGC(WkbImportFlags.wkbImportDefaults, buffer, null), DEFAULT_CRS);
-        return new AGeometry(geometry);
+        try {
+            // Skip the size of the geometry stored in the first 4 bytes
+            byte[] bytes1 = Arrays.copyOfRange(bytes, startOffset + 4, startOffset + size + 4);
+            Geometry geometry = wkbReader.read(bytes1);
+            return new AGeometry(geometry);
+        } catch (ParseException e) {
+            throw HyracksDataException.create(e);
+        }
     }
 }
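
The serializer/deserializer pair above is a WKB round trip; a self-contained JTS sketch of the same idea (GeoFunctionUtils.LITTLE_ENDIAN_BYTEORDER matches ByteOrderValues.LITTLE_ENDIAN == 2):

    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.Geometry;
    import org.locationtech.jts.geom.GeometryFactory;
    import org.locationtech.jts.io.WKBReader;
    import org.locationtech.jts.io.WKBWriter;

    public class WkbRoundTripSketch {
        public static void main(String[] args) throws Exception {
            GeometryFactory factory = new GeometryFactory();
            Geometry point = factory.createPoint(new Coordinate(-122.3, 47.6));

            // 2D little-endian WKB, mirroring the serialize() methods above
            byte[] wkb = new WKBWriter(2, 2).write(point);

            // read() throws ParseException on malformed input, which the
            // deserializer wraps in a HyracksDataException
            Geometry back = new WKBReader().read(wkb);
            System.out.println(back.equalsExact(point)); // true
        }
    }
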
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoFunctionUtils.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoFunctionUtils.java
new file mode 100644
index 0000000..1e92b62
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoFunctionUtils.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts;
+
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.CoordinateXYZM;
+import org.locationtech.jts.geom.Geometry;
+
+public class GeoFunctionUtils {
+    public static final int LITTLE_ENDIAN_BYTEORDER = 2;
+
+    /**
+     * Returns the dimension of the coordinate based on whether Z or M is defined.
+     * TODO: Add 4th dimension support
+     * @param geometry The geometry to check.
+     * @return the dimensionality of the coordinate (2 or 3; measured (M) coordinates are emitted as 3 until 4D support is added).
+     */
+    public static int getCoordinateDimension(Geometry geometry) {
+        int dimension = 2;
+        if (geometry == null || geometry.isEmpty()) {
+            return 2;
+        }
+        Coordinate sample = geometry.getCoordinates()[0];
+        if (!Double.isNaN(sample.getZ())) {
+            dimension++;
+        }
+
+        if (sample instanceof CoordinateXYZM) {
+            CoordinateXYZM firstCoordXYZM = (CoordinateXYZM) sample;
+            if (!Double.isNaN(firstCoordXYZM.getM())) {
+                dimension = 3;
+            }
+        }
+        return dimension;
+    }
+}
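
A quick sketch of the dimension logic:

    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.CoordinateXYZM;
    import org.locationtech.jts.geom.GeometryFactory;

    public class DimensionSketch {
        public static void main(String[] args) {
            GeometryFactory gf = new GeometryFactory();
            // XY only: z defaults to NaN -> 2
            System.out.println(GeoFunctionUtils.getCoordinateDimension(gf.createPoint(new Coordinate(1, 2))));
            // XYZ -> 3
            System.out.println(GeoFunctionUtils.getCoordinateDimension(gf.createPoint(new Coordinate(1, 2, 3))));
            // XYZM: M is detected but still reported as 3 until 4D output is supported
            System.out.println(GeoFunctionUtils.getCoordinateDimension(gf.createPoint(new CoordinateXYZM(1, 2, 3, 4))));
        }
    }
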
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoJsonConstants.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoJsonConstants.java
new file mode 100644
index 0000000..7f3d1de
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoJsonConstants.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts;
+
+/**
+ * Constants used for GeoJSON serialization and deserialization of JTS geometries.
+ * This class provides string constants that represent various GeoJSON object types
+ * and properties, such as types of geometries and common GeoJSON attributes like
+ * coordinates and type.
+ * <p>
+ * These constants are used throughout the Jackson JTS module to ensure consistency
+ * in processing and generating GeoJSON.
+ */
+public class GeoJsonConstants {
+    public static final String POINT = "Point";
+    public static final String LINE_STRING = "LineString";
+    public static final String POLYGON = "Polygon";
+
+    public static final String MULTI_POINT = "MultiPoint";
+    public static final String MULTI_LINE_STRING = "MultiLineString";
+    public static final String MULTI_POLYGON = "MultiPolygon";
+
+    public static final String GEOMETRY_COLLECTION = "GeometryCollection";
+
+    public static final String TYPE = "type";
+
+    public static final String GEOMETRIES = "geometries";
+
+    public static final String COORDINATES = "coordinates";
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/JtsModule.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/JtsModule.java
new file mode 100644
index 0000000..77b31ce
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/JtsModule.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/JtsModule.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.GenericGeometryParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.GeometryCollectionParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.LineStringParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.MultiLineStringParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.MultiPointParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.MultiPolygonParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.PointParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.PolygonParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.serde.GeometryDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.serde.GeometrySerializer;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryCollection;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.MultiLineString;
+import org.locationtech.jts.geom.MultiPoint;
+import org.locationtech.jts.geom.MultiPolygon;
+import org.locationtech.jts.geom.Point;
+import org.locationtech.jts.geom.Polygon;
+
+import com.fasterxml.jackson.core.Version;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+
+/**
+ * A Jackson module for serializing and deserializing JTS (Java Topology Suite) geometry objects.
+ * This module provides custom serializers and deserializers capable of handling various types
+ * of geometries such as Points, LineStrings, Polygons, and their respective multi-part counterparts,
+ * as well as GeometryCollections.
+ * <p>
+ * It leverages a {@link GeometryFactory} for the creation of geometry objects during deserialization,
+ * ensuring that geometry objects are constructed appropriately.
+ */
+public class JtsModule extends SimpleModule {
+    private static final long serialVersionUID = 324082011931609589L;
+
+    public JtsModule() {
+        this(new GeometryFactory());
+    }
+
+    /**
+     * Constructs a JtsModule with a specified {@link GeometryFactory}.
+     * This constructor allows for customization of the geometry factory used for creating
+     * JTS geometry objects, providing flexibility for various precision and SRID settings.
+     *
+     * @param geometryFactory the geometry factory to use for creating geometry objects
+     *                        during deserialization
+     */
+    public JtsModule(GeometryFactory geometryFactory) {
+        super("JtsModule", new Version(1, 0, 0, null, null, null));
+
+        addSerializer(Geometry.class, new GeometrySerializer());
+        GenericGeometryParser genericGeometryParser = new GenericGeometryParser(geometryFactory);
+        addDeserializer(Geometry.class, new GeometryDeserializer<>(genericGeometryParser));
+        addDeserializer(Point.class, new GeometryDeserializer<>(new PointParser(geometryFactory)));
+        addDeserializer(MultiPoint.class, new GeometryDeserializer<>(new MultiPointParser(geometryFactory)));
+        addDeserializer(LineString.class, new GeometryDeserializer<>(new LineStringParser(geometryFactory)));
+        addDeserializer(MultiLineString.class, new GeometryDeserializer<>(new MultiLineStringParser(geometryFactory)));
+        addDeserializer(Polygon.class, new GeometryDeserializer<>(new PolygonParser(geometryFactory)));
+        addDeserializer(MultiPolygon.class, new GeometryDeserializer<>(new MultiPolygonParser(geometryFactory)));
+        addDeserializer(GeometryCollection.class,
+                new GeometryDeserializer<>(new GeometryCollectionParser(geometryFactory, genericGeometryParser)));
+    }
+
+    @Override
+    public void setupModule(SetupContext context) {
+        super.setupModule(context);
+    }
+}
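
Typical module usage, following the upstream jackson-datatype-jts project (sketch; the printed GeoJSON is illustrative):

    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.Geometry;
    import org.locationtech.jts.geom.GeometryFactory;

    import com.fasterxml.jackson.databind.ObjectMapper;

    public class JtsModuleSketch {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            mapper.registerModule(new JtsModule());

            Geometry point = new GeometryFactory().createPoint(new Coordinate(10, 20));
            String geoJson = mapper.writeValueAsString(point); // e.g. {"type":"Point","coordinates":[10.0,20.0]}
            Geometry parsed = mapper.readValue(geoJson, Geometry.class);
            System.out.println(parsed.equalsExact(point)); // true
        }
    }
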
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/BaseParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/BaseParser.java
new file mode 100644
index 0000000..4721680
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/BaseParser.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/BaseParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import org.locationtech.jts.geom.GeometryFactory;
+
+/**
+ * A base class for parsers that convert GeoJSON data into JTS Geometry objects.
+ * This class provides common functionality and a shared {@link GeometryFactory} for its subclasses.
+ * Subclasses of {@code BaseParser} use this shared geometry factory to construct specific types of geometries from
+ * their GeoJSON representations. By extending this base class, they inherit the common functionality and the ability
+ * to leverage the same geometry creation process.
+ */
+public class BaseParser {
+
+    protected GeometryFactory geometryFactory;
+
+    public BaseParser(GeometryFactory geometryFactory) {
+        this.geometryFactory = geometryFactory;
+    }
+
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GenericGeometryParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GenericGeometryParser.java
new file mode 100644
index 0000000..d1c9475
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GenericGeometryParser.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/GenericGeometryParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for converting various types of GeoJSON geometries into JTS {@link Geometry} objects using a specified {@link GeometryFactory}.
+ * This class acts as a general-purpose parser that can handle multiple types of GeoJSON geometries, including Point, MultiPoint,
+ * LineString, MultiLineString, Polygon, MultiPolygon, and GeometryCollection. It dynamically delegates the parsing to specific
+ * geometry parsers based on the GeoJSON type of the geometry.
+ * The parser extends {@code BaseParser} to utilize shared functionality and ensure consistent application of the {@link GeometryFactory}
+ * for creating JTS geometry instances. It maintains a registry of individual geometry parsers, each capable of handling a specific
+ * type of GeoJSON geometry.
+ */
+public class GenericGeometryParser extends BaseParser implements GeometryParser<Geometry> {
+
+    private final Map<String, GeometryParser<? extends Geometry>> parsers;
+
+    public GenericGeometryParser(GeometryFactory geometryFactory) {
+        super(geometryFactory);
+        parsers = new HashMap<>();
+        parsers.put(GeoJsonConstants.POINT, new PointParser(geometryFactory));
+        parsers.put(GeoJsonConstants.MULTI_POINT, new MultiPointParser(geometryFactory));
+        parsers.put(GeoJsonConstants.LINE_STRING, new LineStringParser(geometryFactory));
+        parsers.put(GeoJsonConstants.MULTI_LINE_STRING, new MultiLineStringParser(geometryFactory));
+        parsers.put(GeoJsonConstants.POLYGON, new PolygonParser(geometryFactory));
+        parsers.put(GeoJsonConstants.MULTI_POLYGON, new MultiPolygonParser(geometryFactory));
+        parsers.put(GeoJsonConstants.GEOMETRY_COLLECTION, new GeometryCollectionParser(geometryFactory, this));
+    }
+
+    @Override
+    public Geometry geometryFromJson(JsonNode node) throws JsonMappingException {
+        String typeName = node.get(GeoJsonConstants.TYPE).asText();
+        GeometryParser<? extends Geometry> parser = parsers.get(typeName);
+        if (parser != null) {
+            return parser.geometryFromJson(node);
+        } else {
+            throw new JsonMappingException("Invalid geometry type: " + typeName);
+        }
+    }
+}
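
A sketch of the type-driven dispatch:

    import org.locationtech.jts.geom.Geometry;
    import org.locationtech.jts.geom.GeometryFactory;

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class GenericParserSketch {
        public static void main(String[] args) throws Exception {
            JsonNode node = new ObjectMapper()
                    .readTree("{\"type\":\"LineString\",\"coordinates\":[[0,0],[1,1]]}");
            // The "type" field selects the registered parser; unknown types throw JsonMappingException.
            Geometry geom = new GenericGeometryParser(new GeometryFactory()).geometryFromJson(node);
            System.out.println(geom); // LINESTRING (0 0, 1 1)
        }
    }
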
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryCollectionParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryCollectionParser.java
new file mode 100644
index 0000000..a945675
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryCollectionParser.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/GeometryCollectionParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryCollection;
+import org.locationtech.jts.geom.GeometryFactory;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for converting GeoJSON geometry collection data into JTS {@link GeometryCollection} objects using a specified {@link GeometryFactory}.
+ * This class is designed to handle the parsing of GeoJSON representations of geometry collections, which may include multiple geometries
+ * of different types (e.g., Points, LineStrings, Polygons, etc.). It utilizes a {@link GenericGeometryParser} to handle the parsing of individual
+ * geometries within the collection.
+ *
+ * The parser extends {@code BaseParser} to leverage common functionality and ensure the consistent application of the {@link GeometryFactory}
+ * for creating {@link GeometryCollection} instances.
+ */
+public class GeometryCollectionParser extends BaseParser implements GeometryParser<GeometryCollection> {
+
+    private GenericGeometryParser genericGeometriesParser;
+
+    public GeometryCollectionParser(GeometryFactory geometryFactory, GenericGeometryParser genericGeometriesParser) {
+        super(geometryFactory);
+        this.genericGeometriesParser = genericGeometriesParser;
+    }
+
+    private Geometry[] geometriesFromJson(JsonNode arrayOfGeoms) throws JsonMappingException {
+        Geometry[] items = new Geometry[arrayOfGeoms.size()];
+        for (int i = 0; i != arrayOfGeoms.size(); ++i) {
+            items[i] = genericGeometriesParser.geometryFromJson(arrayOfGeoms.get(i));
+        }
+        return items;
+    }
+
+    @Override
+    public GeometryCollection geometryFromJson(JsonNode node) throws JsonMappingException {
+        return geometryFactory.createGeometryCollection(geometriesFromJson(node.get(GeoJsonConstants.GEOMETRIES)));
+    }
+}
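
Continuing the previous sketch, a nested collection parses through the same delegation chain:

    import org.locationtech.jts.geom.GeometryCollection;
    import org.locationtech.jts.geom.GeometryFactory;

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class CollectionParserSketch {
        public static void main(String[] args) throws Exception {
            String json = "{\"type\":\"GeometryCollection\",\"geometries\":["
                    + "{\"type\":\"Point\",\"coordinates\":[1,2]},"
                    + "{\"type\":\"LineString\",\"coordinates\":[[0,0],[2,2]]}]}";
            JsonNode node = new ObjectMapper().readTree(json);
            // Each member geometry is parsed by the shared GenericGeometryParser.
            GeometryCollection gc = (GeometryCollection) new GenericGeometryParser(new GeometryFactory())
                    .geometryFromJson(node);
            System.out.println(gc.getNumGeometries()); // 2
        }
    }
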
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryParser.java
new file mode 100644
index 0000000..5e3bacc
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryParser.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/GeometryParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import org.locationtech.jts.geom.Geometry;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * An interface for parsing GeoJSON data into JTS {@link Geometry} objects.
+ * This interface defines a contract for classes that convert JSON representations of geometries
+ * into specific JTS geometry instances, such as Points, LineStrings, Polygons, etc.
+ * The {@code GeometryParser} interface ensures a standard method is available for deserializing
+ * GeoJSON structures into their corresponding JTS geometrical forms. Implementations of this interface
+ * are responsible for handling the parsing logic for different types of geometries.
+ *
+ * @param <T> the type of JTS Geometry that the parser will produce, such as Point, LineString, Polygon, etc.
+ */
+public interface GeometryParser<T extends Geometry> {
+
+    T geometryFromJson(JsonNode node) throws JsonMappingException;
+
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/LineStringParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/LineStringParser.java
new file mode 100644
index 0000000..1c744db
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/LineStringParser.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/LineStringParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import static org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants.COORDINATES;
+
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.LineString;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for converting GeoJSON line string data into JTS {@link LineString} objects using a specified {@link GeometryFactory}.
+ * This class parses JSON representations of line strings, which are essentially ordered sets of coordinates that form a continuous line.
+ * It supports the extraction and transformation of coordinate arrays from GeoJSON into JTS LineString geometries.
+ * The parser extends {@code BaseParser} to utilize common functionalities and ensures the consistent application of the {@link GeometryFactory}
+ * for creating {@link LineString} instances.
+ */
+public class LineStringParser extends BaseParser implements GeometryParser<LineString> {
+
+    public LineStringParser(GeometryFactory geometryFactory) {
+        super(geometryFactory);
+    }
+
+    public LineString lineStringFromJson(JsonNode root) {
+        return geometryFactory.createLineString(PointParser.coordinatesFromJson(root.get(COORDINATES)));
+    }
+
+    @Override
+    public LineString geometryFromJson(JsonNode node) throws JsonMappingException {
+        return lineStringFromJson(node);
+    }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiLineStringParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiLineStringParser.java
new file mode 100644
index 0000000..9a7fbae
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiLineStringParser.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/MultiLineStringParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import static org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants.COORDINATES;
+
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.MultiLineString;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for converting GeoJSON multi-line string data into JTS {@link MultiLineString} objects using a specified {@link GeometryFactory}.
+ * This class handles the parsing of multi-line string geometries, where each line string is represented by an array of coordinate arrays.
+ * It supports the conversion of complex multi-line string geometries, which may consist of multiple, distinct line strings.
+ * The parser extends {@code BaseParser} to utilize shared functionality and ensure the consistent application of the {@link GeometryFactory}
+ * in creating {@link MultiLineString} instances.
+ */
+public class MultiLineStringParser extends BaseParser implements GeometryParser<MultiLineString> {
+
+    public MultiLineStringParser(GeometryFactory geometryFactory) {
+        super(geometryFactory);
+    }
+
+    public MultiLineString multiLineStringFromJson(JsonNode root) {
+        return geometryFactory.createMultiLineString(lineStringsFromJson(root.get(COORDINATES)));
+    }
+
+    private LineString[] lineStringsFromJson(JsonNode array) {
+        LineString[] strings = new LineString[array.size()];
+        for (int i = 0; i != array.size(); ++i) {
+            strings[i] = geometryFactory.createLineString(PointParser.coordinatesFromJson(array.get(i)));
+        }
+        return strings;
+    }
+
+    @Override
+    public MultiLineString geometryFromJson(JsonNode node) throws JsonMappingException {
+        return multiLineStringFromJson(node);
+    }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPointParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPointParser.java
new file mode 100644
index 0000000..a27d60a
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPointParser.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/MultiPointParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import static org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants.COORDINATES;
+
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.MultiPoint;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for converting GeoJSON multi-point data into JTS {@link MultiPoint} objects using a specified {@link GeometryFactory}.
+ * This class is capable of parsing JSON representations of multi-point geometries, where each point is represented
+ * by an array of coordinates. The parser extends {@code BaseParser} to utilize common functionality and ensure
+ * the {@link GeometryFactory} is applied consistently to create {@link MultiPoint} instances.
+ */
+public class MultiPointParser extends BaseParser implements GeometryParser<MultiPoint> {
+
+    public MultiPointParser(GeometryFactory geometryFactory) {
+        super(geometryFactory);
+    }
+
+    public MultiPoint multiPointFromJson(JsonNode root) {
+        return geometryFactory.createMultiPointFromCoords(PointParser.coordinatesFromJson(root.get(COORDINATES)));
+    }
+
+    @Override
+    public MultiPoint geometryFromJson(JsonNode node) throws JsonMappingException {
+        return multiPointFromJson(node);
+    }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPolygonParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPolygonParser.java
new file mode 100644
index 0000000..c9a4235
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPolygonParser.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/MultiPolygonParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import static org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants.COORDINATES;
+
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.MultiPolygon;
+import org.locationtech.jts.geom.Polygon;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for converting GeoJSON multi-polygon data into JTS {@link MultiPolygon} objects using a specified {@link GeometryFactory}.
+ * This class leverages a helper {@link PolygonParser} to parse individual polygons from a collection of polygons represented
+ * in a GeoJSON format. It supports parsing complex multi-polygon geometries, which can include multiple outer boundaries and
+ * their respective inner holes.
+ * The parser extends {@code BaseParser} to make use of common functionalities and ensure the consistent application of the
+ * {@link GeometryFactory} in creating {@link MultiPolygon} instances.
+ */
+public class MultiPolygonParser extends BaseParser implements GeometryParser<MultiPolygon> {
+
+    private PolygonParser helperParser;
+
+    public MultiPolygonParser(GeometryFactory geometryFactory) {
+        super(geometryFactory);
+        helperParser = new PolygonParser(geometryFactory);
+    }
+
+    public MultiPolygon multiPolygonFromJson(JsonNode root) {
+        JsonNode arrayOfPolygons = root.get(COORDINATES);
+        return geometryFactory.createMultiPolygon(polygonsFromJson(arrayOfPolygons));
+    }
+
+    private Polygon[] polygonsFromJson(JsonNode arrayOfPolygons) {
+        Polygon[] polygons = new Polygon[arrayOfPolygons.size()];
+        for (int i = 0; i != arrayOfPolygons.size(); ++i) {
+            polygons[i] = helperParser.polygonFromJsonArrayOfRings(arrayOfPolygons.get(i));
+        }
+        return polygons;
+    }
+
+    @Override
+    public MultiPolygon geometryFromJson(JsonNode node) throws JsonMappingException {
+        return multiPolygonFromJson(node);
+    }
+}
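
Note that every ring handed to the helper PolygonParser must be closed (first and last positions equal) and contain at least four positions, since JTS linear rings enforce closure. A minimal sketch with a hypothetical demo class:

    import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.MultiPolygonParser;
    import org.locationtech.jts.geom.GeometryFactory;
    import org.locationtech.jts.geom.MultiPolygon;

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class MultiPolygonParserDemo {
        public static void main(String[] args) throws Exception {
            // One triangular polygon; the single ring is closed.
            JsonNode root = new ObjectMapper().readTree(
                    "{\"type\":\"MultiPolygon\",\"coordinates\":[[[[0,0],[0,5],[5,5],[0,0]]]]}");
            MultiPolygon mp = new MultiPolygonParser(new GeometryFactory()).multiPolygonFromJson(root);
            System.out.println(mp.getArea()); // 12.5
        }
    }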
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PointParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PointParser.java
new file mode 100644
index 0000000..a91a326
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PointParser.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/PointParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import static org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants.COORDINATES;
+
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.CoordinateXYZM;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.Point;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * Parses JSON representations of GeoJSON Points into JTS {@link Point} objects.
+ * This parser handles the extraction of coordinates from GeoJSON and converts them
+ * into {@link Point} geometries using a provided {@link GeometryFactory}.
+ *
+ * This class supports reading points defined with two-dimensional (x, y),
+ * three-dimensional (x, y, z), and four-dimensional (x, y, z, m) coordinates.
+ */
+public class PointParser extends BaseParser implements GeometryParser<Point> {
+
+    public PointParser(GeometryFactory geometryFactory) {
+        super(geometryFactory);
+    }
+
+    public static Coordinate coordinateFromJson(JsonNode array) {
+        assert array.isArray() && (array.size() == 2 || array.size() == 3
+                || array.size() == 4) : "expecting coordinate array with single point [ x, y, |z|, |m| ]";
+
+        if (array.size() == 2) {
+            return new Coordinate(array.get(0).asDouble(), array.get(1).asDouble());
+        }
+
+        if (array.size() == 3) {
+            return new Coordinate(array.get(0).asDouble(), array.get(1).asDouble(), array.get(2).asDouble());
+        }
+
+        return new CoordinateXYZM(array.get(0).asDouble(), array.get(1).asDouble(), array.get(2).asDouble(),
+                array.get(3).asDouble());
+    }
+
+    public static Coordinate[] coordinatesFromJson(JsonNode array) {
+        Coordinate[] points = new Coordinate[array.size()];
+        for (int i = 0; i != array.size(); ++i) {
+            points[i] = PointParser.coordinateFromJson(array.get(i));
+        }
+        return points;
+    }
+
+    public Point pointFromJson(JsonNode node) {
+        return geometryFactory.createPoint(coordinateFromJson(node.get(COORDINATES)));
+    }
+
+    @Override
+    public Point geometryFromJson(JsonNode node) throws JsonMappingException {
+        return pointFromJson(node);
+    }
+}
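
The three accepted coordinate arities can be exercised directly; the demo class below is hypothetical:

    import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.PointParser;
    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.GeometryFactory;
    import org.locationtech.jts.geom.Point;

    import com.fasterxml.jackson.databind.ObjectMapper;

    public class PointParserDemo {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            // Full GeoJSON point object (x, y).
            Point p = new PointParser(new GeometryFactory())
                    .pointFromJson(mapper.readTree("{\"type\":\"Point\",\"coordinates\":[1.5,2.5]}"));
            System.out.println(p.getX() + " " + p.getY()); // 1.5 2.5
            // A bare four-element array yields a CoordinateXYZM carrying a measure.
            Coordinate c = PointParser.coordinateFromJson(mapper.readTree("[1,2,3,4]"));
            System.out.println(c.getM()); // 4.0
        }
    }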
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PolygonParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PolygonParser.java
new file mode 100644
index 0000000..5c7492a
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PolygonParser.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/PolygonParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import static org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants.COORDINATES;
+
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.LinearRing;
+import org.locationtech.jts.geom.Polygon;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for transforming GeoJSON polygon data into JTS {@link Polygon} objects using a specified {@link GeometryFactory}.
+ * This class handles the parsing of polygons, which may include an outer boundary and any number of inner holes.
+ * Each polygon is defined by arrays of coordinates that represent linear rings—the first array defines the exterior boundary,
+ * and any subsequent arrays define interior holes.
+ * This parser extends {@code BaseParser} to leverage shared functionality and ensure consistent application of the
+ * {@link GeometryFactory} in creating polygon geometries.
+ */
+public class PolygonParser extends BaseParser implements GeometryParser<Polygon> {
+
+    public PolygonParser(GeometryFactory geometryFactory) {
+        super(geometryFactory);
+    }
+
+    public Polygon polygonFromJson(JsonNode node) {
+        JsonNode arrayOfRings = node.get(COORDINATES);
+        return polygonFromJsonArrayOfRings(arrayOfRings);
+    }
+
+    public Polygon polygonFromJsonArrayOfRings(JsonNode arrayOfRings) {
+        LinearRing shell = linearRingsFromJson(arrayOfRings.get(0));
+        int size = arrayOfRings.size();
+        LinearRing[] holes = new LinearRing[size - 1];
+        for (int i = 1; i < size; i++) {
+            holes[i - 1] = linearRingsFromJson(arrayOfRings.get(i));
+        }
+        return geometryFactory.createPolygon(shell, holes);
+    }
+
+    private LinearRing linearRingsFromJson(JsonNode coordinates) {
+        assert coordinates.isArray() : "expected coordinates array";
+        return geometryFactory.createLinearRing(PointParser.coordinatesFromJson(coordinates));
+    }
+
+    @Override
+    public Polygon geometryFromJson(JsonNode node) throws JsonMappingException {
+        return polygonFromJson(node);
+    }
+}
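
A sketch with an outer shell and one triangular hole (hypothetical demo class; both rings are closed, as GeoJSON requires):

    import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.PolygonParser;
    import org.locationtech.jts.geom.GeometryFactory;
    import org.locationtech.jts.geom.Polygon;

    import com.fasterxml.jackson.databind.ObjectMapper;

    public class PolygonParserDemo {
        public static void main(String[] args) throws Exception {
            String json = "{\"type\":\"Polygon\",\"coordinates\":["
                    + "[[0,0],[0,10],[10,10],[10,0],[0,0]],"   // exterior shell
                    + "[[2,2],[2,4],[4,4],[2,2]]]}";           // interior hole
            Polygon polygon = new PolygonParser(new GeometryFactory())
                    .polygonFromJson(new ObjectMapper().readTree(json));
            System.out.println(polygon.getNumInteriorRing()); // 1
        }
    }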
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometryDeserializer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometryDeserializer.java
new file mode 100644
index 0000000..1cebfcd
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometryDeserializer.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/serialization/GeometryDeserializer.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.serde;
+
+import java.io.IOException;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.GeometryParser;
+import org.locationtech.jts.geom.Geometry;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.ObjectCodec;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.JsonDeserializer;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A custom Jackson deserializer for JTS Geometry objects.
+ * This deserializer translates JSON structures into JTS Geometry instances using a specified
+ * {@link GeometryParser}. It supports generic geometry types, allowing for flexible deserialization
+ * of various specific types of geometries such as Point, LineString, Polygon, etc.
+ * The deserializer relies on a geometry parser which must be provided during instantiation.
+ * The parser is responsible for converting a JSON node into a corresponding JTS Geometry object.
+ * Usage:
+ * This deserializer is registered in the Jackson JTS
+ * module {@link org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.JtsModule} and is used to configure an
+ * ObjectMapper to support JTS geometries.
+ */
+public class GeometryDeserializer<T extends Geometry> extends JsonDeserializer<T> {
+
+    private GeometryParser<T> geometryParser;
+
+    public GeometryDeserializer(GeometryParser<T> geometryParser) {
+        this.geometryParser = geometryParser;
+    }
+
+    /**
+     * Deserializes a JSON node into a JTS Geometry object.
+     * The JSON node is processed by the configured GeometryParser to produce the Geometry instance.
+     *
+     * @param jsonParser the Jackson parser reading the JSON content
+     * @param deserializationContext the Jackson deserialization context
+     * @return the deserialized JTS Geometry object
+     * @throws IOException if there is an issue in reading or parsing the JSON node
+     */
+    @Override
+    public T deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
+        ObjectCodec oc = jsonParser.getCodec();
+        JsonNode root = oc.readTree(jsonParser);
+        return geometryParser.geometryFromJson(root);
+    }
+}
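
In this patch the deserializer is wired up by JtsModule; a minimal manual registration, sketched under that assumption with a hypothetical demo class, could look like:

    import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.PointParser;
    import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.serde.GeometryDeserializer;
    import org.locationtech.jts.geom.GeometryFactory;
    import org.locationtech.jts.geom.Point;

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.module.SimpleModule;

    public class GeometryDeserializerDemo {
        public static void main(String[] args) throws Exception {
            SimpleModule module = new SimpleModule();
            module.addDeserializer(Point.class,
                    new GeometryDeserializer<>(new PointParser(new GeometryFactory())));
            ObjectMapper mapper = new ObjectMapper().registerModule(module);
            Point p = mapper.readValue("{\"type\":\"Point\",\"coordinates\":[3.0,4.0]}", Point.class);
            System.out.println(p); // POINT (3 4)
        }
    }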
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometrySerializer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometrySerializer.java
new file mode 100644
index 0000000..3207afb
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometrySerializer.java
@@ -0,0 +1,231 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/serialization/GeometrySerializer.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.serde;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants;
+import org.locationtech.jts.geom.CoordinateXYZM;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryCollection;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.MultiLineString;
+import org.locationtech.jts.geom.MultiPoint;
+import org.locationtech.jts.geom.MultiPolygon;
+import org.locationtech.jts.geom.Point;
+import org.locationtech.jts.geom.Polygon;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
+
+/**
+ * A custom Jackson serializer for JTS Geometry objects that translates these objects into their GeoJSON representations.
+ * This class supports serialization for all primary JTS geometry types including Point, LineString, Polygon, and their
+ * respective collections such as MultiPoint, MultiLineString, MultiPolygon, and GeometryCollection.
+ * It handles complex geometries by delegating to specific methods based on the instance type of the geometry object,
+ * ensuring that each geometry type is correctly represented according to the GeoJSON standard.
+ */
+public class GeometrySerializer extends JsonSerializer<Geometry> {
+
+    @Override
+    public void serialize(Geometry value, JsonGenerator jsonGenerator, SerializerProvider provider) throws IOException {
+        writeGeometry(jsonGenerator, value);
+    }
+
+    /**
+     * Writes the geometry object to the JsonGenerator. This method determines the type of the geometry
+     * and calls the appropriate method to handle the serialization.
+     *
+     * @param jsonGenerator the JsonGenerator to use for writing the GeoJSON
+     * @param value the Geometry object to serialize
+     * @throws IOException if an input/output error occurs
+     */
+    public void writeGeometry(JsonGenerator jsonGenerator, Geometry value) throws IOException {
+        if (value instanceof Polygon) {
+            writePolygon(jsonGenerator, (Polygon) value);
+
+        } else if (value instanceof Point) {
+            writePoint(jsonGenerator, (Point) value);
+
+        } else if (value instanceof MultiPoint) {
+            writeMultiPoint(jsonGenerator, (MultiPoint) value);
+
+        } else if (value instanceof MultiPolygon) {
+            writeMultiPolygon(jsonGenerator, (MultiPolygon) value);
+
+        } else if (value instanceof LineString) {
+            writeLineString(jsonGenerator, (LineString) value);
+
+        } else if (value instanceof MultiLineString) {
+            writeMultiLineString(jsonGenerator, (MultiLineString) value);
+
+        } else if (value instanceof GeometryCollection) {
+            writeGeometryCollection(jsonGenerator, (GeometryCollection) value);
+
+        } else {
+            throw new JsonMappingException(jsonGenerator,
+                    "Geometry type " + value.getClass().getName() + " cannot be serialized as GeoJSON."
+                            + "Supported types are: "
+                            + Arrays.asList(Point.class.getName(), LineString.class.getName(), Polygon.class.getName(),
+                                    MultiPoint.class.getName(), MultiLineString.class.getName(),
+                                    MultiPolygon.class.getName(), GeometryCollection.class.getName()));
+        }
+    }
+
+    private void writeGeometryCollection(JsonGenerator jsonGenerator, GeometryCollection value) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.GEOMETRY_COLLECTION);
+        jsonGenerator.writeArrayFieldStart(GeoJsonConstants.GEOMETRIES);
+
+        for (int i = 0; i != value.getNumGeometries(); ++i) {
+            writeGeometry(jsonGenerator, value.getGeometryN(i));
+        }
+
+        jsonGenerator.writeEndArray();
+        jsonGenerator.writeEndObject();
+    }
+
+    private void writeMultiPoint(JsonGenerator jsonGenerator, MultiPoint value) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.MULTI_POINT);
+        jsonGenerator.writeArrayFieldStart(GeoJsonConstants.COORDINATES);
+
+        for (int i = 0; i != value.getNumGeometries(); ++i) {
+            writePointCoordinates(jsonGenerator, (Point) value.getGeometryN(i));
+        }
+
+        jsonGenerator.writeEndArray();
+        jsonGenerator.writeEndObject();
+    }
+
+    private void writeMultiLineString(JsonGenerator jsonGenerator, MultiLineString value) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.MULTI_LINE_STRING);
+        jsonGenerator.writeArrayFieldStart(GeoJsonConstants.COORDINATES);
+
+        for (int i = 0; i != value.getNumGeometries(); ++i) {
+            writeLineStringCoordinates(jsonGenerator, (LineString) value.getGeometryN(i));
+        }
+
+        jsonGenerator.writeEndArray();
+        jsonGenerator.writeEndObject();
+    }
+
+    @Override
+    public Class<Geometry> handledType() {
+        return Geometry.class;
+    }
+
+    private void writeMultiPolygon(JsonGenerator jsonGenerator, MultiPolygon value) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.MULTI_POLYGON);
+        jsonGenerator.writeArrayFieldStart(GeoJsonConstants.COORDINATES);
+
+        for (int i = 0; i != value.getNumGeometries(); ++i) {
+            writePolygonCoordinates(jsonGenerator, (Polygon) value.getGeometryN(i));
+        }
+
+        jsonGenerator.writeEndArray();
+        jsonGenerator.writeEndObject();
+    }
+
+    private void writePolygon(JsonGenerator jsonGenerator, Polygon value) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.POLYGON);
+        jsonGenerator.writeFieldName(GeoJsonConstants.COORDINATES);
+        writePolygonCoordinates(jsonGenerator, value);
+
+        jsonGenerator.writeEndObject();
+    }
+
+    private void writePolygonCoordinates(JsonGenerator jsonGenerator, Polygon value) throws IOException {
+        jsonGenerator.writeStartArray();
+        writeLineStringCoordinates(jsonGenerator, value.getExteriorRing());
+
+        for (int i = 0; i < value.getNumInteriorRing(); ++i) {
+            writeLineStringCoordinates(jsonGenerator, value.getInteriorRingN(i));
+        }
+        jsonGenerator.writeEndArray();
+    }
+
+    private void writeLineStringCoordinates(JsonGenerator jsonGenerator, LineString ring) throws IOException {
+        jsonGenerator.writeStartArray();
+        for (int i = 0; i != ring.getNumPoints(); ++i) {
+            Point p = ring.getPointN(i);
+            writePointCoordinates(jsonGenerator, p);
+        }
+        jsonGenerator.writeEndArray();
+    }
+
+    private void writeLineString(JsonGenerator jsonGenerator, LineString lineString) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.LINE_STRING);
+        jsonGenerator.writeFieldName(GeoJsonConstants.COORDINATES);
+        writeLineStringCoordinates(jsonGenerator, lineString);
+        jsonGenerator.writeEndObject();
+    }
+
+    private void writePoint(JsonGenerator jsonGenerator, Point p) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.POINT);
+        jsonGenerator.writeFieldName(GeoJsonConstants.COORDINATES);
+        writePointCoordinates(jsonGenerator, p);
+        jsonGenerator.writeEndObject();
+    }
+
+    private void writePointCoordinates(JsonGenerator jsonGenerator, Point p) throws IOException {
+        jsonGenerator.writeStartArray();
+
+        writeFormattedNumber(jsonGenerator, p.getCoordinate().x);
+        writeFormattedNumber(jsonGenerator, p.getCoordinate().y);
+
+        if (!Double.isNaN(p.getCoordinate().z)) {
+            writeFormattedNumber(jsonGenerator, p.getCoordinate().z);
+        }
+
+        if (p.getCoordinate() instanceof CoordinateXYZM) {
+            double m = p.getCoordinate().getM();
+            writeFormattedNumber(jsonGenerator, m);
+        }
+        jsonGenerator.writeEndArray();
+    }
+
+    private void writeFormattedNumber(JsonGenerator jsonGenerator, double value) throws IOException {
+        if ((value == Math.floor(value)) && !Double.isInfinite(value)) {
+            jsonGenerator.writeNumber((int) value);
+        } else {
+            jsonGenerator.writeNumber(value);
+        }
+    }
+
+}
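
Because writeFormattedNumber drops the fractional part of whole numbers, a point at (1.5, 2.0) should serialize roughly as shown in the trailing comment. A hedged sketch (the demo class is hypothetical):

    import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.serde.GeometrySerializer;
    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.Geometry;
    import org.locationtech.jts.geom.GeometryFactory;

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.module.SimpleModule;

    public class GeometrySerializerDemo {
        public static void main(String[] args) throws Exception {
            SimpleModule module = new SimpleModule();
            module.addSerializer(Geometry.class, new GeometrySerializer());
            ObjectMapper mapper = new ObjectMapper().registerModule(module);
            Geometry point = new GeometryFactory().createPoint(new Coordinate(1.5, 2.0));
            System.out.println(mapper.writeValueAsString(point)); // {"type":"Point","coordinates":[1.5,2]}
        }
    }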
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/CSVPrinterFactoryProvider.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/CSVPrinterFactoryProvider.java
index b8201ae..322b3e6 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/CSVPrinterFactoryProvider.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/CSVPrinterFactoryProvider.java
@@ -18,6 +18,15 @@
  */
 package org.apache.asterix.formats.nontagged;
 
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_DELIMITER;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_ESCAPE;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_FORCE_QUOTE;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_NULL;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_QUOTE;
+
+import java.util.Collections;
+import java.util.Map;
+
 import org.apache.asterix.dataflow.data.nontagged.printers.adm.ShortWithoutTypeInfoPrinterFactory;
 import org.apache.asterix.dataflow.data.nontagged.printers.csv.ABooleanPrinterFactory;
 import org.apache.asterix.dataflow.data.nontagged.printers.csv.ACirclePrinterFactory;
@@ -52,12 +61,26 @@
 import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
 import org.apache.hyracks.algebricks.data.IPrinterFactory;
 import org.apache.hyracks.algebricks.data.IPrinterFactoryProvider;
+import org.apache.hyracks.api.exceptions.SourceLocation;
 
 public class CSVPrinterFactoryProvider implements IPrinterFactoryProvider {
+    private ARecordType itemType;
+    private Map<String, String> configuration;
+    private SourceLocation sourceLocation;
 
-    public static final CSVPrinterFactoryProvider INSTANCE = new CSVPrinterFactoryProvider();
+    public static final CSVPrinterFactoryProvider INSTANCE =
+            new CSVPrinterFactoryProvider(null, Collections.emptyMap(), null);
 
-    private CSVPrinterFactoryProvider() {
+    public static CSVPrinterFactoryProvider createInstance(ARecordType itemType,
+            Map<String, String> configuration, SourceLocation sourceLocation) {
+        return new CSVPrinterFactoryProvider(itemType, configuration, sourceLocation);
+    }
+
+    private CSVPrinterFactoryProvider(ARecordType itemType, Map<String, String> configuration,
+            SourceLocation sourceLocation) {
+        this.itemType = itemType;
+        this.configuration = configuration;
+        this.sourceLocation = sourceLocation;
     }
 
     @Override
@@ -76,7 +99,7 @@
                     return AInt64PrinterFactory.INSTANCE;
                 case MISSING:
                 case NULL:
-                    return ANullPrinterFactory.INSTANCE;
+                    return ANullPrinterFactory.createInstance(configuration.get(KEY_NULL));
                 case BOOLEAN:
                     return ABooleanPrinterFactory.INSTANCE;
                 case FLOAT:
@@ -110,13 +133,15 @@
                 case RECTANGLE:
                     return ARectanglePrinterFactory.INSTANCE;
                 case STRING:
-                    return AStringPrinterFactory.INSTANCE;
+                    return AStringPrinterFactory.createInstance(configuration.get(KEY_QUOTE),
+                            configuration.get(KEY_FORCE_QUOTE), configuration.get(KEY_ESCAPE),
+                            configuration.get(KEY_DELIMITER));
                 case OBJECT:
-                    return new ARecordPrinterFactory((ARecordType) type);
+                    return new ARecordPrinterFactory((ARecordType) type, itemType, configuration);
                 case ARRAY:
-                    throw new NotImplementedException("'Orderedlist' type unsupported for CSV output");
+                    throw new NotImplementedException("'OrderedList' type unsupported for CSV output");
                 case MULTISET:
-                    throw new NotImplementedException("'Unorderedlist' type unsupported for CSV output");
+                    throw new NotImplementedException("'UnorderedList' type unsupported for CSV output");
                 case UNION:
                     if (((AUnionType) type).isUnknownableType()) {
                         return new AOptionalFieldPrinterFactory((AUnionType) type);
@@ -142,7 +167,7 @@
                     break;
             }
         }
-        return AObjectPrinterFactory.INSTANCE;
+        return AObjectPrinterFactory.createInstance(itemType, configuration);
 
     }
 }
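A hedged sketch of obtaining a printer factory through the new createInstance entry point (assumes getPrinterFactory(Object) is the IPrinterFactoryProvider method implemented above; the record type and empty configuration are illustrative):

    import java.util.Collections;

    import org.apache.asterix.formats.nontagged.CSVPrinterFactoryProvider;
    import org.apache.asterix.om.types.ARecordType;
    import org.apache.asterix.om.types.BuiltinType;
    import org.apache.asterix.om.types.IAType;
    import org.apache.hyracks.algebricks.data.IPrinterFactory;

    public class CsvProviderDemo {
        public static void main(String[] args) throws Exception {
            ARecordType recType = new ARecordType("person", new String[] { "name" },
                    new IAType[] { BuiltinType.ASTRING }, false);
            // With an empty configuration the printers fall back to their defaults.
            IPrinterFactory pf = CSVPrinterFactoryProvider
                    .createInstance(recType, Collections.emptyMap(), null)
                    .getPrinterFactory(BuiltinType.ASTRING);
        }
    }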
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AGeometry.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AGeometry.java
index 84a2e81..c9d66b4 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AGeometry.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AGeometry.java
@@ -20,22 +20,23 @@
 
 import java.io.IOException;
 
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.JtsModule;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.IAType;
+import org.locationtech.jts.geom.Geometry;
 
-import com.esri.core.geometry.ogc.OGCGeometry;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 
 public class AGeometry implements IAObject {
 
-    protected OGCGeometry geometry;
+    protected Geometry geometry;
 
-    public AGeometry(OGCGeometry geometry) {
+    public AGeometry(Geometry geometry) {
         this.geometry = geometry;
     }
 
-    public OGCGeometry getGeometry() {
+    public Geometry getGeometry() {
         return geometry;
     }
 
@@ -67,11 +68,13 @@
     @Override
     public ObjectNode toJSON() {
         ObjectMapper om = new ObjectMapper();
-        ObjectNode json = null;
+        om.registerModule(new JtsModule());
+        ObjectNode json;
         try {
-            json = (ObjectNode) om.readTree(geometry.asGeoJson());
+            String geoJson = om.writeValueAsString(geometry);
+            json = (ObjectNode) om.readTree(geoJson);
         } catch (IOException e) {
-            throw new RuntimeException(e);
+            return om.createObjectNode();
         }
         return json;
     }
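Since toJSON now registers JtsModule on its own ObjectMapper, callers get GeoJSON without any setup. A small sketch (demo class hypothetical; assumes JtsModule wires in the serializer shown earlier):

    import org.apache.asterix.om.base.AGeometry;
    import org.locationtech.jts.io.WKTReader;

    import com.fasterxml.jackson.databind.node.ObjectNode;

    public class AGeometryDemo {
        public static void main(String[] args) throws Exception {
            AGeometry geom = new AGeometry(new WKTReader().read("POINT (1 2)"));
            ObjectNode json = geom.toJSON();
            System.out.println(json); // {"type":"Point","coordinates":[1,2]}
        }
    }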
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableGeometry.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableGeometry.java
index 346d68a..925a0c9 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableGeometry.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableGeometry.java
@@ -18,29 +18,31 @@
  */
 package org.apache.asterix.om.base;
 
-import com.esri.core.geometry.OGCStructure;
-import com.esri.core.geometry.OperatorImportFromWkt;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.WktImportFlags;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKTReader;
 
 public class AMutableGeometry extends AGeometry {
+    private final WKTReader wktReader = new WKTReader();
 
-    private OperatorImportFromWkt wktImporter;
-
-    public AMutableGeometry(OGCGeometry geom) {
+    public AMutableGeometry(Geometry geom) {
         super(geom);
-        wktImporter = OperatorImportFromWkt.local();
-    }
-
-    public void setValue(OGCGeometry geom) {
         this.geometry = geom;
     }
 
-    public void parseWKT(String wkt) {
-        OGCStructure structure;
+    public void setValue(Geometry geom) {
+        this.geometry = geom;
+    }
 
-        structure = wktImporter.executeOGC(WktImportFlags.wktImportNonTrusted, wkt, null);
-        this.geometry = OGCGeometry.createFromOGCStructure(structure, SpatialReference.create(4326));
+    public Geometry getGeometry() {
+        return this.geometry;
+    }
+
+    public void parseWKT(String wkt) {
+        try {
+            this.geometry = wktReader.read(wkt);
+        } catch (ParseException e) {
+            throw new RuntimeException(e);
+        }
     }
 }
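With the WKT path now going through JTS, mutation looks like this (hypothetical demo; note that parseWKT wraps a ParseException in a RuntimeException on malformed input):

    import org.apache.asterix.om.base.AMutableGeometry;
    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.GeometryFactory;

    public class AMutableGeometryDemo {
        public static void main(String[] args) {
            AMutableGeometry mg =
                    new AMutableGeometry(new GeometryFactory().createPoint(new Coordinate(0, 0)));
            mg.parseWKT("LINESTRING (0 0, 1 1)");
            System.out.println(mg.getGeometry().getGeometryType()); // LineString
        }
    }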
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/AbstractFieldNamesDictionary.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/AbstractFieldNamesDictionary.java
new file mode 100644
index 0000000..b2cd223
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/AbstractFieldNamesDictionary.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.om.dictionary;
+
+import java.io.DataInput;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
+import org.apache.asterix.om.base.AMutableString;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.hyracks.data.std.primitive.VoidPointable;
+import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
+import org.apache.hyracks.util.string.UTF8StringReader;
+import org.apache.hyracks.util.string.UTF8StringWriter;
+
+public abstract class AbstractFieldNamesDictionary implements IFieldNamesDictionary {
+    /**
+     * Dummy field name used to add a column when encountering an empty object
+     */
+    public static final IValueReference DUMMY_FIELD_NAME;
+    public static final int DUMMY_FIELD_NAME_INDEX = -1;
+
+    //For declared fields
+    private final AMutableString mutableString;
+    private final AStringSerializerDeserializer stringSerDer;
+
+    static {
+        VoidPointable dummy = new VoidPointable();
+        dummy.set(new byte[0], 0, 0);
+        DUMMY_FIELD_NAME = dummy;
+    }
+
+    AbstractFieldNamesDictionary() {
+        mutableString = new AMutableString("");
+        stringSerDer = new AStringSerializerDeserializer(new UTF8StringWriter(), new UTF8StringReader());
+    }
+
+    public static IFieldNamesDictionary create() {
+        return new FieldNamesTrieDictionary();
+    }
+
+    public static IFieldNamesDictionary deserialize(DataInput input) throws IOException {
+        return FieldNamesTrieDictionary.deserialize(input);
+    }
+
+    static ArrayBackedValueStorage creatFieldName(IValueReference fieldName) throws HyracksDataException {
+        ArrayBackedValueStorage copy = new ArrayBackedValueStorage(fieldName.getLength());
+        copy.append(fieldName);
+        return copy;
+    }
+
+    protected ArrayBackedValueStorage creatFieldName(String fieldName) throws HyracksDataException {
+        ArrayBackedValueStorage serializedFieldName = new ArrayBackedValueStorage();
+        serializeFieldName(fieldName, serializedFieldName);
+        return serializedFieldName;
+    }
+
+    protected void serializeFieldName(String fieldName, ArrayBackedValueStorage storage) throws HyracksDataException {
+        mutableString.setValue(fieldName);
+        stringSerDer.serialize(mutableString, storage.getDataOutput());
+    }
+
+    static void deserializeFieldNames(DataInput input, List<IValueReference> fieldNames, int numberOfFieldNames)
+            throws IOException {
+        for (int i = 0; i < numberOfFieldNames; i++) {
+            int length = input.readInt();
+            ArrayBackedValueStorage fieldName = new ArrayBackedValueStorage(length);
+            fieldName.setSize(length);
+            input.readFully(fieldName.getByteArray(), 0, length);
+            fieldNames.add(fieldName);
+        }
+    }
+}
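
A round-trip sketch, assuming IFieldNamesDictionary exposes the members the implementations in this patch override (getOrCreateFieldNameIndex, getFieldNameIndex, serialize):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;

    import org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary;
    import org.apache.asterix.om.dictionary.IFieldNamesDictionary;

    public class DictionaryRoundTrip {
        public static void main(String[] args) throws Exception {
            IFieldNamesDictionary dict = AbstractFieldNamesDictionary.create();
            int index = dict.getOrCreateFieldNameIndex("name"); // 0 for the first field
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            dict.serialize(new DataOutputStream(bytes));
            IFieldNamesDictionary copy = AbstractFieldNamesDictionary.deserialize(
                    new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
            System.out.println(copy.getFieldNameIndex("name")); // 0
        }
    }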
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/ByteToNodeMap.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/ByteToNodeMap.java
new file mode 100644
index 0000000..7bfae0b
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/ByteToNodeMap.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.om.dictionary;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Collection;
+
+import it.unimi.dsi.fastutil.objects.ObjectArrays;
+
+final class ByteToNodeMap {
+    private static final TrieNode[] EMPTY = new TrieNode[0];
+    private TrieNode[] children;
+    private int numberOfChildren;
+
+    ByteToNodeMap() {
+        children = EMPTY;
+        numberOfChildren = 0;
+    }
+
+    private ByteToNodeMap(TrieNode[] children, int numberOfChildren) {
+        this.children = children;
+        this.numberOfChildren = numberOfChildren;
+    }
+
+    void put(byte key, TrieNode node) {
+        int index = Byte.toUnsignedInt(key);
+        ensure(index);
+        children[index] = node;
+        numberOfChildren++;
+    }
+
+    TrieNode get(byte key) {
+        int index = Byte.toUnsignedInt(key);
+        if (index < children.length) {
+            return children[index];
+        }
+
+        return null;
+    }
+
+    private void ensure(int index) {
+        if (index >= children.length) {
+            children = ObjectArrays.grow(children, index + 1, children.length);
+        }
+    }
+
+    void addAllChildren(Collection<TrieNode> collection) {
+        int addedChildren = 0;
+        for (int i = 0; i < children.length && addedChildren < numberOfChildren; i++) {
+            TrieNode child = children[i];
+            if (child != null) {
+                collection.add(children[i]);
+                addedChildren++;
+            }
+        }
+    }
+
+    void serialize(DataOutput out) throws IOException {
+        out.writeInt(numberOfChildren);
+        out.writeInt(children.length);
+        int addedChildren = 0;
+        for (int i = 0; i < children.length && addedChildren < numberOfChildren; i++) {
+            TrieNode child = children[i];
+            if (child != null) {
+                out.writeInt(i);
+                child.serialize(out);
+                addedChildren++;
+            }
+        }
+    }
+
+    static ByteToNodeMap deserialize(DataInput in) throws IOException {
+        int numberOfChildren = in.readInt();
+        int length = in.readInt();
+        TrieNode[] children = length == 0 ? EMPTY : new TrieNode[length];
+        for (int i = 0; i < numberOfChildren; i++) {
+            int index = in.readInt();
+            children[index] = TrieNode.deserialize(in);
+        }
+
+        return new ByteToNodeMap(children, numberOfChildren);
+    }
+}
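
Keys are bytes but slots are indexed unsigned, so bytes at or above 0x80 land in slots 128-255 rather than going negative. A sketch (it must live in org.apache.asterix.om.dictionary, since ByteToNodeMap and TrieNode are package-private):

    package org.apache.asterix.om.dictionary;

    public class ByteToNodeMapDemo {
        public static void main(String[] args) {
            ByteToNodeMap map = new ByteToNodeMap();
            TrieNode node = new TrieNode();
            map.put((byte) 0xC3, node);                       // negative byte, unsigned index 195
            System.out.println(map.get((byte) 0xC3) == node); // true
            System.out.println(map.get((byte) 0x41));         // null: key never inserted
        }
    }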
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNameTrie.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNameTrie.java
new file mode 100644
index 0000000..5333af5
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNameTrie.java
@@ -0,0 +1,296 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.om.dictionary;
+
+import static org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary.deserializeFieldNames;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Queue;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.hyracks.util.string.UTF8StringUtil;
+
+public class FieldNameTrie {
+    private static final int VERSION = 1;
+    private final LookupState lookupState;
+
+    private final List<IValueReference> fieldNames;
+    private TrieNode rootNode;
+
+    public FieldNameTrie() {
+        this(new ArrayList<>());
+    }
+
+    private FieldNameTrie(List<IValueReference> fieldNames) {
+        this.fieldNames = fieldNames;
+        this.rootNode = new TrieNode();
+        lookupState = new LookupState();
+    }
+
+    public int insert(IValueReference fieldName) throws HyracksDataException {
+        int presentIndex = lookup(fieldName);
+        if (presentIndex == TrieNode.NOT_FOUND_INDEX) {
+            presentIndex = hookup(FieldNamesTrieDictionary.creatFieldName(fieldName));
+        }
+        return presentIndex;
+    }
+
+    public int lookup(IValueReference fieldName) {
+        //noinspection DuplicatedCode
+        int len = UTF8StringUtil.getUTFLength(fieldName.getByteArray(), fieldName.getStartOffset());
+        int start = fieldName.getStartOffset() + UTF8StringUtil.getNumBytesToStoreLength(len);
+        byte[] bytes = fieldName.getByteArray();
+
+        TrieNode searchNode = rootNode;
+        TrieNode prevNode = searchNode;
+
+        int byteIndex = start;
+        // previousByteIndex should point to the first byte to be compared
+        // when inserting the fieldName
+        int previousByteIndex = byteIndex;
+
+        int lastIndex = (start + len - 1);
+        while (byteIndex <= lastIndex) {
+            byte b = bytes[byteIndex];
+
+            TrieNode nextNode = searchNode.getChild(b);
+            if (nextNode == null) {
+                // saving state in case hookup is requested
+                lookupState.setState(prevNode, start, previousByteIndex, len);
+                return TrieNode.NOT_FOUND_INDEX;
+            }
+            // if the node exists, then compare the remaining byte seq.
+            prevNode = searchNode;
+            searchNode = nextNode;
+
+            if (searchNode.getLength() > 1) { // the first stored byte already matches bytes[byteIndex]
+                // compare the stored sequence.
+                int fieldBytesLeftToCompare = lastIndex - byteIndex + 1;
+                // if the stored sequence in the node is longer than the input field's
+                // remaining bytes, the field cannot be present.
+                if (fieldBytesLeftToCompare < searchNode.getLength()) {
+                    // saving state in case hookup is requested
+                    lookupState.setState(prevNode, start, byteIndex, len);
+                    return TrieNode.NOT_FOUND_INDEX;
+                }
+
+                int c = 0;
+                byte[] storedFieldBytes = fieldNames.get(searchNode.getIndex()).getByteArray();
+                int storedFieldStart = searchNode.getStart();
+                previousByteIndex = byteIndex;
+                while (c < searchNode.getLength()) {
+                    if (bytes[byteIndex] != storedFieldBytes[storedFieldStart + c]) {
+                        // saving state in case hookup is requested
+                        // will restart from previousByteIndex, to keep the logic simple;
+                        // an alternative would have been to store the split index.
+                        lookupState.setState(prevNode, start, previousByteIndex, len);
+                        return TrieNode.NOT_FOUND_INDEX;
+                    }
+                    c++;
+                    byteIndex++;
+                }
+            } else {
+                previousByteIndex = byteIndex;
+                byteIndex++;
+            }
+        }
+
+        // saving state in case hookup is requested
+        lookupState.setState(prevNode, start, previousByteIndex, len);
+        return searchNode.isEndOfField() ? searchNode.getIndex() : TrieNode.NOT_FOUND_INDEX;
+    }
+
+    private int hookup(IValueReference fieldName) {
+        // since a lookup operation always precedes a hookup operation,
+        // we can use the saved state to start the hookup.
+        int len = lookupState.getFieldLength();
+        TrieNode searchNode = lookupState.getLastNode();
+
+        // resume from the stored node.
+        int bytesToStoreLength = UTF8StringUtil.getNumBytesToStoreLength(len);
+
+        int byteIndex = lookupState.getRelativeOffsetFromStart() + bytesToStoreLength;
+        byte[] bytes = fieldName.getByteArray();
+        int lastIndex = (bytesToStoreLength + len - 1);
+        while (byteIndex <= lastIndex) {
+            byte b = bytes[byteIndex];
+            TrieNode nextNode = searchNode.getChild(b);
+            if (nextNode == null) {
+                // since no such node exists, create one and store
+                // the remaining bytes in it.
+                TrieNode childNode = new TrieNode();
+                // first insert, then add the field
+                // start from byteIndex with newLength.
+                // newLength = lastIndex - byteIndex + 1
+                childNode.setIndex(fieldNames.size(), byteIndex, lastIndex - byteIndex + 1, bytesToStoreLength);
+                childNode.setIsEndOfField(true);
+                fieldNames.add(fieldName);
+
+                searchNode.putChild(b, childNode);
+                return childNode.getIndex();
+            }
+            // if node exists, compare the remaining byte seq
+            searchNode = nextNode;
+
+            if (searchNode.getLength() > 1) {
+                // compare the byte seq
+                int c = 0;
+                int oldByteIndex = byteIndex;
+
+                IValueReference storedFieldName = fieldNames.get(searchNode.getIndex());
+                byte[] storedFieldBytes = storedFieldName.getByteArray();
+                int storedFieldStart = searchNode.getStart();
+                while (c < Math.min(searchNode.getLength(), lastIndex - oldByteIndex + 1)) {
+                    if (bytes[byteIndex] != storedFieldBytes[storedFieldStart + c]) {
+                        break;
+                    }
+                    c++;
+                    byteIndex++;
+                }
+
+                // from c & byteIndex, things are not matching,
+                // split into two nodes,
+                // one from (c, ...) -> handled below
+                // other from (byteIndex, ...) -> handled in the next iteration, as byteIndex will be absent.
+
+                // handling (c, ..)
+                int leftToSplitForCurrentNode = searchNode.getLength() - c;
+                if (leftToSplitForCurrentNode > 0) {
+                    searchNode.split(storedFieldName, c);
+                }
+            } else {
+                byteIndex++;
+            }
+        }
+
+        // the node is already present, so point it to the current fieldName,
+        // and update the start and length based on the fieldName;
+        // the prefix is unchanged.
+        // find the absolute starting point within the current fieldName
+        int diff = searchNode.getStart() - searchNode.getBytesToStoreLength();
+        // since hookup happens on a new fieldName, the start will be bytesToStoreLength
+        searchNode.setIndex(fieldNames.size(), bytesToStoreLength + diff, searchNode.getLength(), bytesToStoreLength);
+        searchNode.setIsEndOfField(true);
+        fieldNames.add(fieldName);
+        return searchNode.getIndex();
+    }
+
+    public void serialize(DataOutput out) throws IOException {
+        out.writeInt(VERSION);
+
+        // serialize fieldNames
+        out.writeInt(fieldNames.size());
+        for (IValueReference fieldName : fieldNames) {
+            out.writeInt(fieldName.getLength());
+            out.write(fieldName.getByteArray(), fieldName.getStartOffset(), fieldName.getLength());
+        }
+
+        rootNode.serialize(out);
+    }
+
+    public List<IValueReference> getFieldNames() {
+        return fieldNames;
+    }
+
+    public IValueReference getFieldName(int fieldIndex) {
+        return fieldNames.get(fieldIndex);
+    }
+
+    public void clear() {
+        rootNode = null;
+        fieldNames.clear();
+    }
+
+    public static FieldNameTrie deserialize(DataInput in) throws IOException {
+        int version = in.readInt();
+        if (version == VERSION) {
+            return deserializeV1(in);
+        }
+        throw new IllegalStateException("Unsupported version: " + version);
+    }
+
+    private static FieldNameTrie deserializeV1(DataInput in) throws IOException {
+        int numberOfFieldNames = in.readInt();
+
+        List<IValueReference> fieldNames = new ArrayList<>();
+        deserializeFieldNames(in, fieldNames, numberOfFieldNames);
+
+        FieldNameTrie newTrie = new FieldNameTrie(fieldNames);
+        newTrie.rootNode = TrieNode.deserialize(in);
+
+        return newTrie;
+    }
+
+    @Override
+    public String toString() {
+        TrieNode currentNode = rootNode;
+        Queue<TrieNode> queue = new ArrayDeque<>();
+        currentNode.getChildren().addAllChildren(queue);
+        StringBuilder treeBuilder = new StringBuilder();
+        while (!queue.isEmpty()) {
+            int len = queue.size();
+            for (int i = 0; i < len; i++) {
+                TrieNode node = queue.poll();
+                assert node != null;
+                byte[] bytes = fieldNames.get(node.getIndex()).getByteArray();
+                for (int j = 0; j < node.getLength(); j++) {
+                    treeBuilder.append((char) bytes[node.getStart() + j]);
+                }
+                treeBuilder.append("(").append(node.isEndOfField()).append(")");
+                if (i != len - 1) {
+                    treeBuilder.append(" | ");
+                }
+
+                node.getChildren().addAllChildren(queue);
+            }
+            treeBuilder.append("\n");
+        }
+        return treeBuilder.toString();
+    }
+
+    private static class LookupState {
+        private TrieNode lastNode;
+        private int relativeOffsetFromStart;
+        private int fieldLength;
+
+        public void setState(TrieNode lastNode, int startIndex, int continuationByteIndex, int fieldLength) {
+            this.lastNode = lastNode;
+            this.relativeOffsetFromStart = continuationByteIndex - startIndex;
+            this.fieldLength = fieldLength;
+        }
+
+        public TrieNode getLastNode() {
+            return lastNode;
+        }
+
+        public int getRelativeOffsetFromStart() {
+            return relativeOffsetFromStart;
+        }
+
+        public int getFieldLength() {
+            return fieldLength;
+        }
+    }
+}
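
An insert/lookup sketch (hypothetical demo; assumes Hyracks' UTF8StringPointable.generateUTF8Pointable for building serialized UTF-8 field names):

    import org.apache.asterix.om.dictionary.FieldNameTrie;
    import org.apache.hyracks.data.std.primitive.UTF8StringPointable;

    public class FieldNameTrieDemo {
        public static void main(String[] args) throws Exception {
            FieldNameTrie trie = new FieldNameTrie();
            int a = trie.insert(UTF8StringPointable.generateUTF8Pointable("name"));
            int b = trie.insert(UTF8StringPointable.generateUTF8Pointable("nameSuffix"));
            System.out.println(a + " " + b); // 0 1 -- distinct indices; the shared prefix reuses the existing node
            System.out.println(trie.lookup(UTF8StringPointable.generateUTF8Pointable("name"))); // 0
        }
    }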
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNamesHashDictionary.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNamesHashDictionary.java
new file mode 100644
index 0000000..591f322
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNamesHashDictionary.java
@@ -0,0 +1,199 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.om.dictionary;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hyracks.api.dataflow.value.IBinaryHashFunction;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
+import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
+
+import it.unimi.dsi.fastutil.ints.Int2IntMap;
+import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;
+import it.unimi.dsi.fastutil.objects.Object2IntMap;
+import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
+
+/**
+ * @deprecated Use {@link FieldNamesTrieDictionary}
+ */
+@Deprecated
+public class FieldNamesHashDictionary extends AbstractFieldNamesDictionary {
+    //For both declared and inferred fields
+    private final List<IValueReference> fieldNames;
+    private final Object2IntMap<String> declaredFieldNamesToIndexMap;
+    private final Int2IntMap hashToFieldNameIndexMap;
+    private final IBinaryHashFunction fieldNameHashFunction;
+
+    //For lookups
+    private final ArrayBackedValueStorage lookupStorage;
+
+    public FieldNamesHashDictionary() {
+        this(new ArrayList<>(), new Object2IntOpenHashMap<>(), new Int2IntOpenHashMap());
+    }
+
+    private FieldNamesHashDictionary(List<IValueReference> fieldNames,
+            Object2IntMap<String> declaredFieldNamesToIndexMap, Int2IntMap hashToFieldNameIndexMap) {
+        super();
+        this.fieldNames = fieldNames;
+        this.declaredFieldNamesToIndexMap = declaredFieldNamesToIndexMap;
+        this.hashToFieldNameIndexMap = hashToFieldNameIndexMap;
+        fieldNameHashFunction =
+                new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY).createBinaryHashFunction();
+        lookupStorage = new ArrayBackedValueStorage();
+    }
+
+    @Override
+    public List<IValueReference> getFieldNames() {
+        return fieldNames;
+    }
+
+    // TODO: resolve hash collisions (rare in practice, but currently unhandled)
+    @Override
+    public int getOrCreateFieldNameIndex(IValueReference fieldName) throws HyracksDataException {
+        if (fieldName == DUMMY_FIELD_NAME) {
+            return DUMMY_FIELD_NAME_INDEX;
+        }
+
+        int hash = getHash(fieldName);
+        if (!hashToFieldNameIndexMap.containsKey(hash)) {
+            int index = addFieldName(creatFieldName(fieldName), hash);
+            hashToFieldNameIndexMap.put(hash, index);
+            return index;
+        }
+        return hashToFieldNameIndexMap.get(hash);
+    }
+
+    @Override
+    public int getOrCreateFieldNameIndex(String fieldName) throws HyracksDataException {
+        if (!declaredFieldNamesToIndexMap.containsKey(fieldName)) {
+            IValueReference serializedFieldName = creatFieldName(fieldName);
+            int hash = getHash(serializedFieldName);
+            int index = addFieldName(serializedFieldName, hash);
+            declaredFieldNamesToIndexMap.put(fieldName, index);
+            return index;
+        }
+        return declaredFieldNamesToIndexMap.getInt(fieldName);
+    }
+
+    @Override
+    public int getFieldNameIndex(String fieldName) throws HyracksDataException {
+        lookupStorage.reset();
+        serializeFieldName(fieldName, lookupStorage);
+        return hashToFieldNameIndexMap.getOrDefault(getHash(lookupStorage), -1);
+    }
+
+    private int getHash(IValueReference fieldName) throws HyracksDataException {
+        byte[] object = fieldName.getByteArray();
+        int start = fieldName.getStartOffset();
+        int length = fieldName.getLength();
+
+        return fieldNameHashFunction.hash(object, start, length);
+    }
+
+    private int addFieldName(IValueReference fieldName, int hash) {
+        int index = fieldNames.size();
+        hashToFieldNameIndexMap.put(hash, index);
+        fieldNames.add(fieldName);
+        return index;
+    }
+
+    @Override
+    public IValueReference getFieldName(int index) {
+        if (index == DUMMY_FIELD_NAME_INDEX) {
+            return DUMMY_FIELD_NAME;
+        }
+        return fieldNames.get(index);
+    }
+
+    @Override
+    public void serialize(DataOutput output) throws IOException {
+        output.writeInt(fieldNames.size());
+        for (IValueReference fieldName : fieldNames) {
+            output.writeInt(fieldName.getLength());
+            output.write(fieldName.getByteArray(), fieldName.getStartOffset(), fieldName.getLength());
+        }
+
+        output.writeInt(declaredFieldNamesToIndexMap.size());
+        for (Object2IntMap.Entry<String> declaredFieldIndex : declaredFieldNamesToIndexMap.object2IntEntrySet()) {
+            output.writeUTF(declaredFieldIndex.getKey());
+            output.writeInt(declaredFieldIndex.getIntValue());
+        }
+
+        for (Int2IntMap.Entry hashIndex : hashToFieldNameIndexMap.int2IntEntrySet()) {
+            output.writeInt(hashIndex.getIntKey());
+            output.writeInt(hashIndex.getIntValue());
+        }
+    }
+
+    public static FieldNamesHashDictionary deserialize(DataInput input) throws IOException {
+        int numberOfFieldNames = input.readInt();
+
+        List<IValueReference> fieldNames = new ArrayList<>();
+        deserializeFieldNames(input, fieldNames, numberOfFieldNames);
+
+        Object2IntMap<String> declaredFieldNamesToIndexMap = new Object2IntOpenHashMap<>();
+        deserializeDeclaredFieldNames(input, declaredFieldNamesToIndexMap);
+
+        Int2IntMap hashToFieldNameIndexMap = new Int2IntOpenHashMap();
+        deserializeHashToFieldNameIndex(input, hashToFieldNameIndexMap, numberOfFieldNames);
+
+        return new FieldNamesHashDictionary(fieldNames, declaredFieldNamesToIndexMap, hashToFieldNameIndexMap);
+    }
+
+    @Override
+    public void abort(DataInputStream input) throws IOException {
+        int numberOfFieldNames = input.readInt();
+
+        fieldNames.clear();
+        deserializeFieldNames(input, fieldNames, numberOfFieldNames);
+
+        declaredFieldNamesToIndexMap.clear();
+        deserializeDeclaredFieldNames(input, declaredFieldNamesToIndexMap);
+
+        hashToFieldNameIndexMap.clear();
+        deserializeHashToFieldNameIndex(input, hashToFieldNameIndexMap, numberOfFieldNames);
+    }
+
+    private static void deserializeDeclaredFieldNames(DataInput input,
+            Object2IntMap<String> declaredFieldNamesToIndexMap) throws IOException {
+        int numberOfDeclaredFieldNames = input.readInt();
+        for (int i = 0; i < numberOfDeclaredFieldNames; i++) {
+            String fieldName = input.readUTF();
+            int fieldNameIndex = input.readInt();
+            declaredFieldNamesToIndexMap.put(fieldName, fieldNameIndex);
+        }
+    }
+
+    private static void deserializeHashToFieldNameIndex(DataInput input, Int2IntMap hashToFieldNameIndexMap,
+            int numberOfFieldNames) throws IOException {
+        for (int i = 0; i < numberOfFieldNames; i++) {
+            int hash = input.readInt();
+            int fieldNameIndex = input.readInt();
+            hashToFieldNameIndexMap.put(hash, fieldNameIndex);
+        }
+    }
+}
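
Note on the class above: the deprecated hash dictionary resolves a field name through a single int hash, so two distinct names that happened to collide would silently share an index (hence the TODO). A minimal usage sketch, assuming the asterix-om and hyracks jars are on the classpath:

    import org.apache.asterix.om.dictionary.FieldNamesHashDictionary;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class HashDictionaryExample {
        public static void main(String[] args) throws HyracksDataException {
            FieldNamesHashDictionary dict = new FieldNamesHashDictionary();
            // repeated inserts of the same name return the same index
            int first = dict.getOrCreateFieldNameIndex("name");
            int second = dict.getOrCreateFieldNameIndex("name");
            System.out.println(first == second); // true
            // a name that was never inserted resolves to -1
            System.out.println(dict.getFieldNameIndex("missing")); // -1
        }
    }
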
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNamesTrieDictionary.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNamesTrieDictionary.java
new file mode 100644
index 0000000..30e10d6
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNamesTrieDictionary.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.om.dictionary;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
+
+public class FieldNamesTrieDictionary extends AbstractFieldNamesDictionary {
+    private FieldNameTrie dictionary;
+    //For lookups
+    private final ArrayBackedValueStorage lookupStorage;
+
+    public FieldNamesTrieDictionary() {
+        this(new FieldNameTrie());
+    }
+
+    private FieldNamesTrieDictionary(FieldNameTrie dictionary) {
+        super();
+        this.dictionary = dictionary;
+        lookupStorage = new ArrayBackedValueStorage();
+    }
+
+    @Override
+    public List<IValueReference> getFieldNames() {
+        return dictionary.getFieldNames();
+    }
+
+    @Override
+    public int getOrCreateFieldNameIndex(IValueReference fieldName) throws HyracksDataException {
+        if (fieldName == DUMMY_FIELD_NAME) {
+            return DUMMY_FIELD_NAME_INDEX;
+        }
+
+        return dictionary.insert(fieldName);
+    }
+
+    @Override
+    public int getOrCreateFieldNameIndex(String fieldName) throws HyracksDataException {
+        return getOrCreateFieldNameIndex(creatFieldName(fieldName));
+    }
+
+    @Override
+    public int getFieldNameIndex(String fieldName) throws HyracksDataException {
+        lookupStorage.reset();
+        serializeFieldName(fieldName, lookupStorage);
+        return dictionary.lookup(lookupStorage);
+    }
+
+    @Override
+    public IValueReference getFieldName(int index) {
+        if (index == DUMMY_FIELD_NAME_INDEX) {
+            return DUMMY_FIELD_NAME;
+        }
+        return dictionary.getFieldName(index);
+    }
+
+    @Override
+    public void serialize(DataOutput output) throws IOException {
+        dictionary.serialize(output);
+    }
+
+    public static FieldNamesTrieDictionary deserialize(DataInput input) throws IOException {
+        FieldNameTrie fieldNameTrie = FieldNameTrie.deserialize(input);
+        return new FieldNamesTrieDictionary(fieldNameTrie);
+    }
+
+    @Override
+    public void abort(DataInputStream input) throws IOException {
+        dictionary.clear();
+        dictionary = FieldNameTrie.deserialize(input);
+    }
+}
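
The trie-backed dictionary replaces the hash-based one and matches names byte-for-byte, so there is no collision hazard and shared prefixes do not alias each other. A usage sketch under the same classpath assumption:

    import org.apache.asterix.om.dictionary.FieldNamesTrieDictionary;
    import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class TrieDictionaryExample {
        public static void main(String[] args) throws HyracksDataException {
            IFieldNamesDictionary dict = new FieldNamesTrieDictionary();
            int id = dict.getOrCreateFieldNameIndex("address");
            // a shared prefix is a different key, not the same entry
            System.out.println(dict.getOrCreateFieldNameIndex("addr") != id); // true
            System.out.println(dict.getFieldNameIndex("address") == id); // true
        }
    }
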
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/IFieldNamesDictionary.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/IFieldNamesDictionary.java
new file mode 100644
index 0000000..ec494fa
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/IFieldNamesDictionary.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.om.dictionary;
+
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+
+/**
+ * Defines the operations of a field-name dictionary,
+ * which encodes a field name to an integer index.
+ */
+public interface IFieldNamesDictionary {
+    /**
+     * @return all the field names inserted so far
+     */
+    List<IValueReference> getFieldNames();
+
+    /**
+     * @param fieldName serialized field name as a byte array
+     * @return the index if the field already exists; otherwise, inserts the field name and returns the new index
+     * @throws HyracksDataException
+     */
+    int getOrCreateFieldNameIndex(IValueReference fieldName) throws HyracksDataException;
+
+    /**
+     * @param fieldName field name as a string
+     * @return the index if the field already exists; otherwise, inserts the field name and returns the new index
+     * @throws HyracksDataException
+     */
+    int getOrCreateFieldNameIndex(String fieldName) throws HyracksDataException;
+
+    /**
+     * @param fieldName field name as a string
+     * @return the index of the field if it exists, otherwise -1
+     * @throws HyracksDataException
+     */
+    int getFieldNameIndex(String fieldName) throws HyracksDataException;
+
+    /**
+     * @param index encoded index
+     * @return the fieldName present at the requested index
+     */
+    IValueReference getFieldName(int index);
+
+    /**
+     * serialize the dictionary
+     * @param output
+     * @throws IOException
+     */
+    void serialize(DataOutput output) throws IOException;
+
+    /**
+     * Resets the dictionary and rebuilds it from the given input stream.
+     * @param input
+     * @throws IOException
+     */
+    void abort(DataInputStream input) throws IOException;
+
+}
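
Both implementations also honor the serialize/abort contract above: serialize writes the dictionary to a DataOutput, and abort discards the in-memory state and rebuilds it from a stream. A hypothetical round-trip sketch (HyracksDataException is an IOException subclass, so one throws clause suffices):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.asterix.om.dictionary.FieldNamesTrieDictionary;

    public class DictionarySerdeExample {
        public static void main(String[] args) throws IOException {
            FieldNamesTrieDictionary dict = new FieldNamesTrieDictionary();
            dict.getOrCreateFieldNameIndex("id");
            dict.getOrCreateFieldNameIndex("name");

            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            dict.serialize(new DataOutputStream(bos));

            FieldNamesTrieDictionary copy = FieldNamesTrieDictionary
                    .deserialize(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
            // the copy resolves the same name to the same index
            System.out.println(copy.getFieldNameIndex("name") == dict.getFieldNameIndex("name")); // true
        }
    }
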
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/TrieNode.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/TrieNode.java
new file mode 100644
index 0000000..9ea2978
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/TrieNode.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.om.dictionary;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hyracks.data.std.api.IValueReference;
+
+class TrieNode {
+    public static final int NOT_FOUND_INDEX = -1;
+
+    private ByteToNodeMap children;
+    private boolean isEndOfField;
+    private int index;
+    private int start; // includes the edge's byte
+    private int length; // includes the edge's byte
+    private int bytesToStoreLength;
+
+    TrieNode() {
+        this.children = new ByteToNodeMap();
+        index = NOT_FOUND_INDEX;
+    }
+
+    TrieNode(ByteToNodeMap children) {
+        this.children = children;
+        index = NOT_FOUND_INDEX;
+    }
+
+    public void setIndex(int index, int start, int length, int bytesToStoreLength) {
+        this.index = index;
+        this.start = start;
+        this.length = length;
+        this.bytesToStoreLength = bytesToStoreLength;
+    }
+
+    public void setIsEndOfField(boolean isEndOfField) {
+        this.isEndOfField = isEndOfField;
+    }
+
+    public TrieNode getChild(byte key) {
+        return children.get(key);
+    }
+
+    public void putChild(byte key, TrieNode child) {
+        children.put(key, child);
+    }
+
+    public ByteToNodeMap getChildren() {
+        return children;
+    }
+
+    public int getIndex() {
+        return index;
+    }
+
+    public int getStart() {
+        return start;
+    }
+
+    public int getLength() {
+        return length;
+    }
+
+    public int getBytesToStoreLength() {
+        return bytesToStoreLength;
+    }
+
+    public boolean isEndOfField() {
+        return isEndOfField;
+    }
+
+    public void reset() {
+        // the old children map has been handed off to the new child node, so start fresh
+        children = new ByteToNodeMap();
+    }
+
+    public void split(IValueReference fieldName, int splitIndex) {
+        byte[] storedFieldBytes = fieldName.getByteArray();
+        byte splitByte = storedFieldBytes[start + splitIndex];
+        // a split is needed: create a new child node that takes over
+        // this node's children and the suffix after the split point
+        TrieNode childNode = new TrieNode(children);
+
+        int leftToSplit = length - splitIndex;
+        childNode.setIndex(index, start + splitIndex, leftToSplit, bytesToStoreLength);
+        childNode.setIsEndOfField(isEndOfField);
+        // update the current node: it keeps only the shared prefix,
+        // i.e. the first splitIndex bytes of its label
+        reset();
+        setIndex(index, start, splitIndex, bytesToStoreLength);
+        putChild(splitByte, childNode);
+        // after a split, this node no longer terminates a field, so isEndOfField is false
+        setIsEndOfField(false);
+    }
+
+    public void serialize(DataOutput out) throws IOException {
+        // serialize the children map first
+        children.serialize(out);
+        // serialize fields
+        out.writeBoolean(isEndOfField);
+        out.writeInt(index);
+        out.writeInt(start);
+        out.writeInt(length);
+        out.writeInt(bytesToStoreLength);
+    }
+
+    public static TrieNode deserialize(DataInput in) throws IOException {
+        ByteToNodeMap children = ByteToNodeMap.deserialize(in);
+        TrieNode node = new TrieNode(children);
+        node.isEndOfField = in.readBoolean();
+        node.index = in.readInt();
+        node.start = in.readInt();
+        node.length = in.readInt();
+        node.bytesToStoreLength = in.readInt();
+
+        return node;
+    }
+}
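
TrieNode.split is the standard path-compression split of a radix tree: the existing node keeps the shared prefix, while a freshly created child inherits the old children, the suffix, and the end-of-field marker. A standalone illustration on plain strings (hypothetical Node class, not AsterixDB code):

    import java.util.HashMap;
    import java.util.Map;

    public class SplitExample {
        static final class Node {
            String label; // edge label owned by this node
            boolean endOfField;
            Map<Character, Node> children = new HashMap<>();

            Node(String label, boolean endOfField) {
                this.label = label;
                this.endOfField = endOfField;
            }
        }

        static void split(Node node, int splitIndex) {
            // the new child takes the suffix, the old children, and the end marker
            Node child = new Node(node.label.substring(splitIndex), node.endOfField);
            child.children = node.children;
            // the current node keeps only the shared prefix and links to the child
            node.label = node.label.substring(0, splitIndex);
            node.children = new HashMap<>();
            node.children.put(child.label.charAt(0), child);
            node.endOfField = false; // a split node no longer terminates a field
        }

        public static void main(String[] args) {
            Node n = new Node("street", true);
            split(n, 2); // inserting "state" shares only the prefix "st"
            System.out.println(n.label + " -> " + n.children.keySet()); // st -> [r]
        }
    }
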
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java
index 57b842a..27f2160 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java
@@ -82,6 +82,7 @@
 import org.apache.asterix.om.typecomputer.impl.CollectionToSequenceTypeComputer;
 import org.apache.asterix.om.typecomputer.impl.ConcatNonNullTypeComputer;
 import org.apache.asterix.om.typecomputer.impl.ConcatTypeComputer;
+import org.apache.asterix.om.typecomputer.impl.CountNTypeComputer;
 import org.apache.asterix.om.typecomputer.impl.DoubleIfTypeComputer;
 import org.apache.asterix.om.typecomputer.impl.FieldAccessByIndexResultType;
 import org.apache.asterix.om.typecomputer.impl.FieldAccessByNameResultType;
@@ -417,6 +418,7 @@
     public static final FunctionIdentifier LISTIFY = FunctionConstants.newAsterix("listify", 1);
     public static final FunctionIdentifier AVG = FunctionConstants.newAsterix("agg-avg", 1);
     public static final FunctionIdentifier COUNT = FunctionConstants.newAsterix("agg-count", 1);
+    public static final FunctionIdentifier COUNTN = FunctionConstants.newAsterix("agg-countn", 1);
     public static final FunctionIdentifier SUM = FunctionConstants.newAsterix("agg-sum", 1);
     public static final FunctionIdentifier LOCAL_SUM = FunctionConstants.newAsterix("agg-local-sum", 1);
     public static final FunctionIdentifier INTERMEDIATE_SUM = FunctionConstants.newAsterix("agg-intermediate-sum", 1);
@@ -482,6 +484,7 @@
     public static final FunctionIdentifier SCALAR_ARRAYAGG = FunctionConstants.newAsterix("arrayagg", 1);
     public static final FunctionIdentifier SCALAR_AVG = FunctionConstants.newAsterix("avg", 1);
     public static final FunctionIdentifier SCALAR_COUNT = FunctionConstants.newAsterix("count", 1);
+    public static final FunctionIdentifier SCALAR_COUNTN = FunctionConstants.newAsterix("countn", 1);
     public static final FunctionIdentifier SCALAR_SUM = FunctionConstants.newAsterix("sum", 1);
     public static final FunctionIdentifier SCALAR_MAX = FunctionConstants.newAsterix("max", 1);
     public static final FunctionIdentifier SCALAR_MIN = FunctionConstants.newAsterix("min", 1);
@@ -501,6 +504,7 @@
     // serializable aggregate functions
     public static final FunctionIdentifier SERIAL_AVG = FunctionConstants.newAsterix("avg-serial", 1);
     public static final FunctionIdentifier SERIAL_COUNT = FunctionConstants.newAsterix("count-serial", 1);
+    public static final FunctionIdentifier SERIAL_COUNTN = FunctionConstants.newAsterix("countn-serial", 1);
     public static final FunctionIdentifier SERIAL_SUM = FunctionConstants.newAsterix("sum-serial", 1);
     public static final FunctionIdentifier SERIAL_LOCAL_SUM = FunctionConstants.newAsterix("local-sum-serial", 1);
     public static final FunctionIdentifier SERIAL_INTERMEDIATE_SUM =
@@ -559,6 +563,8 @@
             FunctionConstants.newAsterix("arrayagg-distinct", 1);
     public static final FunctionIdentifier COUNT_DISTINCT = FunctionConstants.newAsterix("agg-count-distinct", 1);
     public static final FunctionIdentifier SCALAR_COUNT_DISTINCT = FunctionConstants.newAsterix("count-distinct", 1);
+    public static final FunctionIdentifier COUNTN_DISTINCT = FunctionConstants.newAsterix("agg-countn-distinct", 1);
+    public static final FunctionIdentifier SCALAR_COUNTN_DISTINCT = FunctionConstants.newAsterix("countn-distinct", 1);
     public static final FunctionIdentifier SUM_DISTINCT = FunctionConstants.newAsterix("agg-sum-distinct", 1);
     public static final FunctionIdentifier SCALAR_SUM_DISTINCT = FunctionConstants.newAsterix("sum-distinct", 1);
     public static final FunctionIdentifier AVG_DISTINCT = FunctionConstants.newAsterix("agg-avg-distinct", 1);
@@ -593,6 +599,7 @@
     public static final FunctionIdentifier INTERMEDIATE_SQL_AVG =
             FunctionConstants.newAsterix("intermediate-agg-sql-avg", 1);
     public static final FunctionIdentifier SQL_COUNT = FunctionConstants.newAsterix("agg-sql-count", 1);
+    public static final FunctionIdentifier SQL_COUNTN = FunctionConstants.newAsterix("agg-sql-countn", 1);
     public static final FunctionIdentifier SQL_SUM = FunctionConstants.newAsterix("agg-sql-sum", 1);
     public static final FunctionIdentifier LOCAL_SQL_SUM = FunctionConstants.newAsterix("agg-local-sql-sum", 1);
     public static final FunctionIdentifier INTERMEDIATE_SQL_SUM =
@@ -666,6 +673,7 @@
 
     public static final FunctionIdentifier SCALAR_SQL_AVG = FunctionConstants.newAsterix("sql-avg", 1);
     public static final FunctionIdentifier SCALAR_SQL_COUNT = FunctionConstants.newAsterix("sql-count", 1);
+    public static final FunctionIdentifier SCALAR_SQL_COUNTN = FunctionConstants.newAsterix("sql-countn", 1);
     public static final FunctionIdentifier SCALAR_SQL_SUM = FunctionConstants.newAsterix("sql-sum", 1);
     public static final FunctionIdentifier SCALAR_SQL_MAX = FunctionConstants.newAsterix("sql-max", 1);
     public static final FunctionIdentifier SCALAR_SQL_MIN = FunctionConstants.newAsterix("sql-min", 1);
@@ -681,6 +689,7 @@
     // serializable sql aggregate functions
     public static final FunctionIdentifier SERIAL_SQL_AVG = FunctionConstants.newAsterix("sql-avg-serial", 1);
     public static final FunctionIdentifier SERIAL_SQL_COUNT = FunctionConstants.newAsterix("sql-count-serial", 1);
+    public static final FunctionIdentifier SERIAL_SQL_COUNTN = FunctionConstants.newAsterix("sql-countn-serial", 1);
     public static final FunctionIdentifier SERIAL_SQL_SUM = FunctionConstants.newAsterix("sql-sum-serial", 1);
     public static final FunctionIdentifier SERIAL_LOCAL_SQL_SUM =
             FunctionConstants.newAsterix("local-sql-sum-serial", 1);
@@ -744,6 +753,10 @@
             FunctionConstants.newAsterix("agg-sql-count-distinct", 1);
     public static final FunctionIdentifier SCALAR_SQL_COUNT_DISTINCT =
             FunctionConstants.newAsterix("sql-count-distinct", 1);
+    public static final FunctionIdentifier SQL_COUNTN_DISTINCT =
+            FunctionConstants.newAsterix("agg-sql-countn-distinct", 1);
+    public static final FunctionIdentifier SCALAR_SQL_COUNTN_DISTINCT =
+            FunctionConstants.newAsterix("sql-countn-distinct", 1);
     public static final FunctionIdentifier SQL_SUM_DISTINCT = FunctionConstants.newAsterix("agg-sql-sum-distinct", 1);
     public static final FunctionIdentifier SCALAR_SQL_SUM_DISTINCT =
             FunctionConstants.newAsterix("sql-sum-distinct", 1);
@@ -1575,6 +1588,7 @@
         addPrivateFunction(GLOBAL_MIN, MinMaxAggTypeComputer.INSTANCE, true);
         addPrivateFunction(NON_EMPTY_STREAM, ABooleanTypeComputer.INSTANCE, true);
         addFunction(COUNT, AInt64TypeComputer.INSTANCE, true);
+        addFunction(COUNTN, CountNTypeComputer.INSTANCE, true);
         addPrivateFunction(LOCAL_AVG, LocalAvgTypeComputer.INSTANCE, true);
         addFunction(AVG, NullableDoubleTypeComputer.INSTANCE, true);
         addPrivateFunction(GLOBAL_AVG, NullableDoubleTypeComputer.INSTANCE, true);
@@ -1623,11 +1637,13 @@
 
         addPrivateFunction(SERIAL_SQL_AVG, NullableDoubleTypeComputer.INSTANCE, true);
         addPrivateFunction(SERIAL_SQL_COUNT, AInt64TypeComputer.INSTANCE, true);
+        addPrivateFunction(SERIAL_SQL_COUNTN, CountNTypeComputer.INSTANCE, true);
         addPrivateFunction(SERIAL_GLOBAL_SQL_AVG, NullableDoubleTypeComputer.INSTANCE, true);
         addPrivateFunction(SERIAL_LOCAL_SQL_AVG, LocalAvgTypeComputer.INSTANCE, true);
         addPrivateFunction(SERIAL_INTERMEDIATE_SQL_AVG, LocalAvgTypeComputer.INSTANCE, true);
         addFunction(SCALAR_AVG, NullableDoubleTypeComputer.INSTANCE, true);
         addFunction(SCALAR_COUNT, AInt64TypeComputer.INSTANCE, true);
+        addFunction(SCALAR_COUNTN, CountNTypeComputer.INSTANCE, true);
         addFunction(SCALAR_MAX, scalarMinMaxTypeComputer, true);
         addFunction(SCALAR_MIN, scalarMinMaxTypeComputer, true);
         addPrivateFunction(INTERMEDIATE_AVG, LocalAvgTypeComputer.INSTANCE, true);
@@ -1685,6 +1701,7 @@
         addPrivateFunction(LOCAL_SQL_AVG, LocalAvgTypeComputer.INSTANCE, true);
         addPrivateFunction(INTERMEDIATE_SQL_AVG, LocalAvgTypeComputer.INSTANCE, true);
         addFunction(SQL_COUNT, AInt64TypeComputer.INSTANCE, true);
+        addFunction(SQL_COUNTN, CountNTypeComputer.INSTANCE, true);
         addFunction(SQL_MAX, MinMaxAggTypeComputer.INSTANCE, true);
         addPrivateFunction(LOCAL_SQL_MAX, MinMaxAggTypeComputer.INSTANCE, true);
         addPrivateFunction(INTERMEDIATE_SQL_MAX, MinMaxAggTypeComputer.INSTANCE, true);
@@ -1695,6 +1712,7 @@
         addPrivateFunction(GLOBAL_SQL_MIN, MinMaxAggTypeComputer.INSTANCE, true);
         addFunction(SCALAR_SQL_AVG, NullableDoubleTypeComputer.INSTANCE, true);
         addFunction(SCALAR_SQL_COUNT, AInt64TypeComputer.INSTANCE, true);
+        addFunction(SCALAR_SQL_COUNTN, CountNTypeComputer.INSTANCE, true);
         addFunction(SCALAR_SQL_MAX, scalarMinMaxTypeComputer, true);
         addFunction(SCALAR_SQL_MIN, scalarMinMaxTypeComputer, true);
         addPrivateFunction(INTERMEDIATE_SQL_AVG, LocalAvgTypeComputer.INSTANCE, true);
@@ -1743,6 +1761,7 @@
 
         addPrivateFunction(SERIAL_AVG, NullableDoubleTypeComputer.INSTANCE, true);
         addPrivateFunction(SERIAL_COUNT, AInt64TypeComputer.INSTANCE, true);
+        addPrivateFunction(SERIAL_COUNTN, CountNTypeComputer.INSTANCE, true);
         addPrivateFunction(SERIAL_GLOBAL_AVG, NullableDoubleTypeComputer.INSTANCE, true);
         addPrivateFunction(SERIAL_LOCAL_AVG, LocalAvgTypeComputer.INSTANCE, true);
         addPrivateFunction(SERIAL_INTERMEDIATE_AVG, LocalAvgTypeComputer.INSTANCE, true);
@@ -1782,6 +1801,11 @@
         addFunction(SQL_COUNT_DISTINCT, AInt64TypeComputer.INSTANCE, true);
         addFunction(SCALAR_SQL_COUNT_DISTINCT, AInt64TypeComputer.INSTANCE, true);
 
+        addFunction(COUNTN_DISTINCT, CountNTypeComputer.INSTANCE, true);
+        addFunction(SCALAR_COUNTN_DISTINCT, CountNTypeComputer.INSTANCE, true);
+        addFunction(SQL_COUNTN_DISTINCT, CountNTypeComputer.INSTANCE, true);
+        addFunction(SCALAR_SQL_COUNTN_DISTINCT, CountNTypeComputer.INSTANCE, true);
+
         addFunction(SUM_DISTINCT, NumericSumAggTypeComputer.INSTANCE, true);
         addFunction(SCALAR_SUM_DISTINCT, scalarNumericSumTypeComputer, true);
         addFunction(SQL_SUM_DISTINCT, NumericSumAggTypeComputer.INSTANCE, true);
@@ -2208,6 +2232,26 @@
         addDistinctAgg(COUNT_DISTINCT, COUNT);
         addScalarAgg(COUNT_DISTINCT, SCALAR_COUNT_DISTINCT);
 
+        // COUNTN
+
+        addAgg(COUNTN);
+        addLocalAgg(COUNTN, COUNTN);
+        addIntermediateAgg(COUNTN, SUM);
+        addGlobalAgg(COUNTN, SUM);
+
+        addScalarAgg(COUNTN, SCALAR_COUNTN);
+
+        addSerialAgg(COUNTN, SERIAL_COUNTN);
+        addAgg(SERIAL_COUNTN);
+        addLocalAgg(SERIAL_COUNTN, SERIAL_COUNTN);
+        addIntermediateAgg(SERIAL_COUNTN, SERIAL_SUM);
+        addGlobalAgg(SERIAL_COUNTN, SERIAL_SUM);
+
+        // COUNTN DISTINCT
+
+        addDistinctAgg(COUNTN_DISTINCT, COUNTN);
+        addScalarAgg(COUNTN_DISTINCT, SCALAR_COUNTN_DISTINCT);
+
         // MAX
         addAgg(MAX);
         addAgg(LOCAL_MAX);
@@ -2719,6 +2763,26 @@
         addDistinctAgg(SQL_COUNT_DISTINCT, SQL_COUNT);
         addScalarAgg(SQL_COUNT_DISTINCT, SCALAR_SQL_COUNT_DISTINCT);
 
+        // SQL COUNTN
+
+        addAgg(SQL_COUNTN);
+        addLocalAgg(SQL_COUNTN, SQL_COUNTN);
+        addIntermediateAgg(SQL_COUNTN, SQL_SUM);
+        addGlobalAgg(SQL_COUNTN, SQL_SUM);
+
+        addScalarAgg(SQL_COUNTN, SCALAR_SQL_COUNTN);
+
+        addSerialAgg(SQL_COUNTN, SERIAL_SQL_COUNTN);
+        addAgg(SERIAL_SQL_COUNTN);
+        addLocalAgg(SERIAL_SQL_COUNTN, SERIAL_SQL_COUNTN);
+        addIntermediateAgg(SERIAL_SQL_COUNTN, SERIAL_SQL_SUM);
+        addGlobalAgg(SERIAL_SQL_COUNTN, SERIAL_SQL_SUM);
+
+        // SQL COUNTN DISTINCT
+
+        addDistinctAgg(SQL_COUNTN_DISTINCT, SQL_COUNTN);
+        addScalarAgg(SQL_COUNTN_DISTINCT, SCALAR_SQL_COUNTN_DISTINCT);
+
         // SQL MAX
         addAgg(SQL_MAX);
         addAgg(LOCAL_SQL_MAX);
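
The COUNTN registrations above follow COUNT's two-phase shape: the local step emits a partial count per partition, and the intermediate/global steps (wired to SUM via addIntermediateAgg/addGlobalAgg) combine the partials by summation. Judging by the name and the CountNTypeComputer added later in this change, COUNTN appears to count only numeric values. A minimal sketch of the decomposition on assumed data:

    import java.util.Arrays;
    import java.util.List;

    public class TwoPhaseCountNExample {
        // local phase: count only the numeric items of one partition
        static long localCountN(List<Object> partition) {
            return partition.stream().filter(v -> v instanceof Number).count();
        }

        public static void main(String[] args) {
            List<Object> partition1 = Arrays.asList(1, 2.5, "x");
            List<Object> partition2 = Arrays.asList(3, null, 4L);
            // global phase: SUM over the partial counts
            long global = localCountN(partition1) + localCountN(partition2);
            System.out.println(global); // 4
        }
    }
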
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java
index 6637af8..440fd6f 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java
@@ -22,6 +22,7 @@
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.LinkedHashSet;
 import java.util.List;
 
 import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
@@ -39,6 +40,8 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.util.string.UTF8StringWriter;
 
+import it.unimi.dsi.fastutil.ints.IntArrayList;
+
 /**
  * This class interprets the binary data representation of a record. One can
  * call getFieldNames, getFieldTypeTags and getFieldValues to get pointable
@@ -56,6 +59,8 @@
     // access results: field names, field types, and field values
     private final List<IVisitablePointable> fieldNames = new ArrayList<>();
     private final List<IVisitablePointable> fieldValues = new ArrayList<>();
+    private final IntArrayList reverseLookupClosedFields = new IntArrayList();
+    private int numFields = 0;
 
     // pointable allocator
     private final PointableAllocator allocator = new PointableAllocator();
@@ -63,6 +68,7 @@
     private final ResettableByteArrayOutputStream typeBos = new ResettableByteArrayOutputStream();
 
     private final ResettableByteArrayOutputStream dataBos = new ResettableByteArrayOutputStream();
+
     private final DataOutputStream dataDos = new DataOutputStream(dataBos);
 
     private final ARecordType inputRecType;
@@ -91,15 +97,38 @@
         try {
             final DataOutputStream typeDos = new DataOutputStream(typeBos);
             final UTF8StringWriter utf8Writer = new UTF8StringWriter();
-            for (int i = 0; i < numberOfSchemaFields; i++) {
-                // add type name Reference (including a string type tag)
-                int nameStart = typeBos.size();
-                typeDos.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
-                utf8Writer.writeUTF8(fieldNameStrs[i], typeDos);
-                int nameEnd = typeBos.size();
-                IVisitablePointable typeNameReference = AFlatValuePointable.FACTORY.create(null);
-                typeNameReference.set(typeBos.getByteArray(), nameStart, nameEnd - nameStart);
-                fieldNames.add(typeNameReference);
+            LinkedHashSet<String> allOrderedFields = inputType.getAllOrderedFields();
+            if (allOrderedFields != null) {
+                numFields = allOrderedFields.size();
+                int index = 0;
+                int nameInClosedField = 0;
+                for (String field : allOrderedFields) {
+                    int nameStart = typeBos.size();
+                    typeDos.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
+                    utf8Writer.writeUTF8(field, typeDos);
+                    int nameEnd = typeBos.size();
+                    IVisitablePointable typeNameReference = AFlatValuePointable.FACTORY.create(null);
+                    typeNameReference.set(typeBos.getByteArray(), nameStart, nameEnd - nameStart);
+                    fieldNames.add(typeNameReference);
+                    fieldValues.add(missingReference);
+                    if (nameInClosedField < numberOfSchemaFields && field.equals(fieldNameStrs[nameInClosedField])) {
+                        reverseLookupClosedFields.add(index);
+                        nameInClosedField++;
+                    }
+                    index++;
+                }
+            } else {
+                for (int i = 0; i < numberOfSchemaFields; i++) {
+                    // add the field name reference (including a string type tag)
+                    int nameStart = typeBos.size();
+                    typeDos.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
+                    utf8Writer.writeUTF8(fieldNameStrs[i], typeDos);
+                    int nameEnd = typeBos.size();
+                    IVisitablePointable typeNameReference = AFlatValuePointable.FACTORY.create(null);
+                    typeNameReference.set(typeBos.getByteArray(), nameStart, nameEnd - nameStart);
+                    fieldNames.add(typeNameReference);
+                    reverseLookupClosedFields.add(fieldNames.size() - 1);
+                }
             }
 
             // initialize a constant: null value bytes reference
@@ -113,6 +142,7 @@
             typeDos.writeByte(ATypeTag.SERIALIZED_MISSING_TYPE_TAG);
             int missingFieldEnd = typeBos.size();
             missingReference.set(typeBos.getByteArray(), missingFieldStart, missingFieldEnd - missingFieldStart);
+
         } catch (IOException e) {
             throw new IllegalStateException(e);
         }
@@ -126,11 +156,22 @@
         // reset the allocator
         allocator.reset();
 
+        int removeTill = numFields;
+        if (numFields == 0) {
+            removeTill = numberOfSchemaFields;
+        }
         // clean up the returned containers
-        for (int i = fieldNames.size() - 1; i >= numberOfSchemaFields; i--) {
+        for (int i = fieldNames.size() - 1; i >= removeTill; i--) {
             fieldNames.remove(i);
         }
-        fieldValues.clear();
+
+        for (int i = fieldValues.size() - 1; i >= numFields; i--) {
+            fieldValues.remove(i);
+        }
+
+        for (int i = 0; i < numFields; i++) {
+            fieldValues.set(i, missingReference);
+        }
     }
 
     @Override
@@ -176,16 +217,25 @@
                     offsetArrayOffset += 4;
                 }
                 for (int fieldNumber = 0; fieldNumber < numberOfSchemaFields; fieldNumber++) {
+                    int index = reverseLookupClosedFields.get(fieldNumber);
                     if (hasOptionalFields) {
                         byte b1 = b[nullBitMapOffset + fieldNumber / 4];
                         if (RecordUtil.isNull(b1, fieldNumber)) {
                             // set null value (including type tag inside)
-                            fieldValues.add(nullReference);
+                            if (index < numFields) {
+                                fieldValues.set(index, nullReference);
+                            } else {
+                                fieldValues.add(nullReference);
+                            }
                             continue;
                         }
                         if (RecordUtil.isMissing(b1, fieldNumber)) {
                             // set missing value (including type tag inside)
-                            fieldValues.add(missingReference);
+                            if (index < numFields) {
+                                fieldValues.set(index, missingReference);
+                            } else {
+                                fieldValues.add(missingReference);
+                            }
                             continue;
                         }
                     }
@@ -212,12 +262,18 @@
                     int fend = dataBos.size();
                     IVisitablePointable fieldValue = allocator.allocateFieldValue(fieldType);
                     fieldValue.set(dataBos.getByteArray(), fstart, fend - fstart);
-                    fieldValues.add(fieldValue);
+                    if (index < numFields) {
+                        fieldValues.set(index, fieldValue);
+                    } else {
+                        fieldValues.add(fieldValue);
+                    }
                 }
             }
             if (isExpanded) {
                 int numberOfOpenFields = AInt32SerializerDeserializer.getInt(b, openPartOffset);
                 int fieldOffset = openPartOffset + 4 + (8 * numberOfOpenFields);
+                int currentCheck = 0; // next candidate slot among the ordered fields
+                int reverseLookupIndex = 0; // tracks closed-field slots to skip
                 for (int i = 0; i < numberOfOpenFields; i++) {
                     // set the field name (including a type tag, which is a string)
                     int fieldValueLength =
@@ -228,7 +284,22 @@
                     int fnend = dataBos.size();
                     IVisitablePointable fieldName = allocator.allocateEmpty();
                     fieldName.set(dataBos.getByteArray(), fnstart, fnend - fnstart);
-                    fieldNames.add(fieldName);
+                    boolean addItToNumFields = true; // true when this open field fills a slot in the ordered field list
+                    for (; currentCheck < numFields; currentCheck++) {
+                        if (reverseLookupIndex < reverseLookupClosedFields.size()
+                                && currentCheck == reverseLookupClosedFields.get(reverseLookupIndex)) {
+                            reverseLookupIndex++;
+                            continue;
+                        }
+
+                        if (fieldNames.get(currentCheck).equals(fieldName)) {
+                            break;
+                        }
+                    }
+                    if (currentCheck >= numFields) {
+                        addItToNumFields = false;
+                        fieldNames.add(fieldName);
+                    }
                     fieldOffset += fieldValueLength;
 
                     typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(b[fieldOffset]);
@@ -238,7 +309,12 @@
                     // allocate
                     IVisitablePointable fieldValueAccessor = allocator.allocateFieldValue(typeTag, b, fieldOffset + 1);
                     fieldValueAccessor.set(b, fieldOffset, fieldValueLength);
-                    fieldValues.add(fieldValueAccessor);
+                    if (!addItToNumFields) {
+                        fieldValues.add(fieldValueAccessor);
+                    } else {
+                        fieldValues.set(currentCheck, fieldValueAccessor);
+                        currentCheck++;
+                    }
                     fieldOffset += fieldValueLength;
                 }
             }
@@ -263,5 +339,4 @@
     public <R, T> R accept(IVisitablePointableVisitor<R, T> vistor, T tag) throws HyracksDataException {
         return vistor.visit(this, tag);
     }
-
 }
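
The reverse-lookup bookkeeping added above maps each closed (schema) field to its position in the record type's full ordered field list, so field values can be written to stable slots instead of being appended in visit order. A plain-Java sketch of that construction, with assumed field names:

    import java.util.Arrays;
    import java.util.LinkedHashSet;
    import java.util.List;

    public class ReverseLookupExample {
        public static void main(String[] args) {
            LinkedHashSet<String> allOrderedFields =
                    new LinkedHashSet<>(List.of("id", "name", "tag", "age"));
            String[] closedFields = { "id", "age" }; // subset, in the same relative order
            int[] reverseLookup = new int[closedFields.length];

            int position = 0, closed = 0;
            for (String field : allOrderedFields) {
                if (closed < closedFields.length && field.equals(closedFields[closed])) {
                    reverseLookup[closed++] = position;
                }
                position++;
            }
            // closed field "id" sits at slot 0, closed field "age" at slot 3
            System.out.println(Arrays.toString(reverseLookup)); // [0, 3]
        }
    }
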
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/ARecordPrinter.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/ARecordPrinter.java
index 7823637..9aaf889 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/ARecordPrinter.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/ARecordPrinter.java
@@ -33,13 +33,13 @@
  * This class is to print the content of a record.
  */
 public class ARecordPrinter {
-    private final String startRecord;
-    private final String endRecord;
-    private final String fieldSeparator;
-    private final String fieldNameSeparator;
+    protected final String startRecord;
+    protected final String endRecord;
+    protected final String fieldSeparator;
+    protected final String fieldNameSeparator;
 
-    private final Pair<PrintStream, ATypeTag> nameVisitorArg = new Pair<>(null, ATypeTag.STRING);
-    private final Pair<PrintStream, ATypeTag> itemVisitorArg = new Pair<>(null, null);
+    protected final Pair<PrintStream, ATypeTag> nameVisitorArg = new Pair<>(null, ATypeTag.STRING);
+    protected final Pair<PrintStream, ATypeTag> itemVisitorArg = new Pair<>(null, null);
 
     public ARecordPrinter(final String startRecord, final String endRecord, final String fieldSeparator,
             final String fieldNameSeparator) {
@@ -59,8 +59,8 @@
 
         ps.print(startRecord);
 
-        final int size = fieldNames.size();
         boolean first = true;
+        final int size = fieldNames.size();
         for (int i = 0; i < size; ++i) {
             final IVisitablePointable fieldName = fieldNames.get(i);
             final IVisitablePointable fieldValue = fieldValues.get(i);
@@ -82,7 +82,7 @@
         ps.print(endRecord);
     }
 
-    private void printField(PrintStream ps, IPrintVisitor visitor, IVisitablePointable fieldName,
+    protected void printField(PrintStream ps, IPrintVisitor visitor, IVisitablePointable fieldName,
             IVisitablePointable fieldValue, ATypeTag fieldTypeTag) throws HyracksDataException {
         itemVisitorArg.second = fieldTypeTag;
         if (fieldNameSeparator != null) {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/csv/ACSVRecordPrinter.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/csv/ACSVRecordPrinter.java
new file mode 100644
index 0000000..ce9e3a3
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/csv/ACSVRecordPrinter.java
@@ -0,0 +1,182 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.om.pointables.printer.csv;
+
+import static org.apache.asterix.om.types.hierachy.ATypeHierarchy.isCompatible;
+
+import java.io.PrintStream;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.asterix.om.pointables.ARecordVisitablePointable;
+import org.apache.asterix.om.pointables.base.IVisitablePointable;
+import org.apache.asterix.om.pointables.printer.ARecordPrinter;
+import org.apache.asterix.om.pointables.printer.IPrintVisitor;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.AUnionType;
+import org.apache.asterix.om.types.EnumDeserializer;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.util.string.UTF8StringUtil;
+
+public class ACSVRecordPrinter extends ARecordPrinter {
+    private ARecordType schema;
+    private boolean firstRecord;
+    private boolean header;
+    private final String recordDelimiter;
+    private static final List<ATypeTag> supportedTypes = List.of(ATypeTag.TINYINT, ATypeTag.SMALLINT, ATypeTag.INTEGER,
+            ATypeTag.BIGINT, ATypeTag.UINT8, ATypeTag.UINT16, ATypeTag.UINT64, ATypeTag.FLOAT, ATypeTag.DOUBLE,
+            ATypeTag.STRING, ATypeTag.BOOLEAN, ATypeTag.DATETIME, ATypeTag.UINT32, ATypeTag.DATE, ATypeTag.TIME);
+
+    public ACSVRecordPrinter(final String startRecord, final String endRecord, final String fieldSeparator,
+            final String fieldNameSeparator, String recordDelimiter, ARecordType schema, String headerStr) {
+        super(startRecord, endRecord, fieldSeparator, fieldNameSeparator);
+        this.schema = schema;
+        this.header = headerStr != null && Boolean.parseBoolean(headerStr);
+        this.firstRecord = true;
+        this.recordDelimiter = recordDelimiter;
+    }
+
+    @Override
+    public void printRecord(ARecordVisitablePointable recordAccessor, PrintStream ps, IPrintVisitor visitor)
+            throws HyracksDataException {
+        // backward compatibility: with no schema, print the record as-is from the recordAccessor
+        if (schema == null) {
+            super.printRecord(recordAccessor, ps, visitor);
+        } else {
+            printSchemaFullRecord(recordAccessor, ps, visitor);
+        }
+    }
+
+    private void printSchemaFullRecord(ARecordVisitablePointable recordAccessor, PrintStream ps, IPrintVisitor visitor)
+            throws HyracksDataException {
+        // validate the record against the expected schema;
+        // only records that match the schema's shape are printed
+        Map<String, ATypeTag> schemaDetails = new HashMap<>();
+        if (checkCSVSchema(recordAccessor, schemaDetails)) {
+            nameVisitorArg.first = ps;
+            itemVisitorArg.first = ps;
+            if (header) {
+                addHeader(recordAccessor, ps, visitor);
+            }
+            // print the record delimiter before every record after the first;
+            // by default the separator between the header and the records is "\n"
+            if (firstRecord) {
+                firstRecord = false;
+            } else {
+                ps.print(recordDelimiter);
+            }
+            final List<IVisitablePointable> fieldNames = recordAccessor.getFieldNames();
+            final List<IVisitablePointable> fieldValues = recordAccessor.getFieldValues();
+
+            boolean first = true;
+            for (int i = 0; i < fieldNames.size(); ++i) {
+                final IVisitablePointable fieldNamePointable = fieldNames.get(i);
+                String fieldName = UTF8StringUtil.toString(fieldNamePointable.getByteArray(),
+                        fieldNamePointable.getStartOffset() + 1);
+                final IVisitablePointable fieldValue = fieldValues.get(i);
+                final ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER
+                        .deserialize(fieldValue.getByteArray()[fieldValue.getStartOffset()]);
+                ATypeTag expectedTypeTag = schemaDetails.get(fieldName);
+                if (!isCompatible(typeTag, expectedTypeTag)) {
+                    expectedTypeTag = ATypeTag.NULL;
+                }
+                if (first) {
+                    first = false;
+                } else {
+                    ps.print(fieldSeparator);
+                }
+                printField(ps, visitor, fieldNamePointable, fieldValue, expectedTypeTag);
+            }
+        }
+    }
+
+    private boolean checkCSVSchema(ARecordVisitablePointable recordAccessor, Map<String, ATypeTag> schemaDetails) {
+        final List<IVisitablePointable> fieldNames = recordAccessor.getFieldNames();
+        final List<IVisitablePointable> fieldValues = recordAccessor.getFieldValues();
+        final List<String> expectedFieldNames = Arrays.asList(schema.getFieldNames());
+        final List<IAType> expectedFieldTypes = Arrays.asList(schema.getFieldTypes());
+        if (fieldNames.size() != expectedFieldNames.size()) {
+            // todo: raise warning about schema mismatch
+            return false;
+        }
+        for (int i = 0; i < fieldNames.size(); ++i) {
+            final IVisitablePointable fieldName = fieldNames.get(i);
+            String fieldColumnName = UTF8StringUtil.toString(fieldName.getByteArray(), fieldName.getStartOffset() + 1);
+            final IVisitablePointable fieldValue = fieldValues.get(i);
+            final ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER
+                    .deserialize(fieldValue.getByteArray()[fieldValue.getStartOffset()]);
+            ATypeTag expectedType;
+            boolean canNull = false;
+            if (expectedFieldNames.contains(fieldColumnName)) {
+                IAType expectedIAType = expectedFieldTypes.get(expectedFieldNames.indexOf(fieldColumnName));
+                if (!supportedTypes.contains(expectedIAType.getTypeTag())) {
+                    if (expectedIAType.getTypeTag().equals(ATypeTag.UNION)) {
+                        AUnionType unionType = (AUnionType) expectedIAType;
+                        expectedType = unionType.getActualType().getTypeTag();
+                        canNull = unionType.isNullableType();
+                        if (!supportedTypes.contains(expectedType)) {
+                            // unsupported data type
+                            return false;
+                        }
+                    } else {
+                        // todo: unexpected type
+                        return false;
+                    }
+                } else {
+                    expectedType = expectedIAType.getTypeTag();
+                }
+                schemaDetails.put(fieldColumnName, expectedType);
+            } else {
+                // todo: raise warning about schema mismatch
+                return false;
+            }
+            if (typeTag.equals(ATypeTag.MISSING) || (typeTag.equals(ATypeTag.NULL) && !canNull)) {
+                // todo: raise warning about schema mismatch
+                return false;
+            }
+            if (!isCompatible(typeTag, expectedType) && !canNull) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    private void addHeader(ARecordVisitablePointable recordAccessor, PrintStream ps, IPrintVisitor visitor)
+            throws HyracksDataException {
+        // emit the header only once, before the first record
+        if (firstRecord) {
+            final List<IVisitablePointable> fieldNames = recordAccessor.getFieldNames();
+            boolean first = true;
+            for (int i = 0; i < fieldNames.size(); ++i) {
+                if (first) {
+                    first = false;
+                } else {
+                    ps.print(fieldSeparator);
+                }
+                printFieldName(ps, visitor, fieldNames.get(i));
+            }
+            firstRecord = false;
+        }
+    }
+}
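
ACSVRecordPrinter only prints a record after checkCSVSchema accepts it, and it treats the header and the record delimiter asymmetrically: the header is emitted once, while the delimiter is printed before every output line except the first, never after the last. A simplified sketch of that protocol (hypothetical helper, not the printer itself):

    import java.io.PrintStream;
    import java.util.List;

    public class CsvEmitExample {
        private boolean firstLine = true;

        void printRow(List<String> values, String delimiter, PrintStream ps) {
            if (firstLine) {
                firstLine = false;
            } else {
                ps.print(delimiter); // between rows, never after the last one
            }
            ps.print(String.join(",", values));
        }

        public static void main(String[] args) {
            CsvEmitExample e = new CsvEmitExample();
            e.printRow(List.of("id", "name"), "\n", System.out); // header row
            e.printRow(List.of("1", "ann"), "\n", System.out);
            e.printRow(List.of("2", "bob"), "\n", System.out);
            System.out.println();
        }
    }
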
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/csv/APrintVisitor.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/csv/APrintVisitor.java
index 3f22374..22c502b 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/csv/APrintVisitor.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/printer/csv/APrintVisitor.java
@@ -19,14 +19,20 @@
 
 package org.apache.asterix.om.pointables.printer.csv;
 
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_HEADER;
+import static org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils.KEY_RECORD_DELIMITER;
+
 import java.io.PrintStream;
+import java.util.Map;
 
 import org.apache.asterix.dataflow.data.nontagged.printers.csv.AObjectPrinterFactory;
+import org.apache.asterix.dataflow.data.nontagged.printers.csv.CSVUtils;
 import org.apache.asterix.om.pointables.AListVisitablePointable;
 import org.apache.asterix.om.pointables.ARecordVisitablePointable;
 import org.apache.asterix.om.pointables.printer.AListPrinter;
 import org.apache.asterix.om.pointables.printer.ARecordPrinter;
 import org.apache.asterix.om.pointables.printer.AbstractPrintVisitor;
+import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -36,6 +42,15 @@
  * PrintStream in CSV format.
  */
 public class APrintVisitor extends AbstractPrintVisitor {
+    private final ARecordType itemType;
+    private final Map<String, String> configuration;
+
+    public APrintVisitor(ARecordType itemType, Map<String, String> configuration) {
+        super();
+        this.itemType = itemType;
+        this.configuration = configuration;
+    }
+
     @Override
     protected AListPrinter createListPrinter(AListVisitablePointable accessor) throws HyracksDataException {
         throw new HyracksDataException("'List' type unsupported for CSV output");
@@ -43,12 +58,15 @@
 
     @Override
     protected ARecordPrinter createRecordPrinter(ARecordVisitablePointable accessor) {
-        return new ARecordPrinter("", "", ",", null);
+        String delimiter = CSVUtils.getDelimiter(configuration);
+        String recordDelimiter = configuration.get(KEY_RECORD_DELIMITER) == null ? (itemType == null ? "" : "\n")
+                : configuration.get(KEY_RECORD_DELIMITER);
+        return new ACSVRecordPrinter("", "", delimiter, null, recordDelimiter, itemType, configuration.get(KEY_HEADER));
     }
 
     @Override
     protected boolean printFlatValue(ATypeTag typeTag, byte[] b, int s, int l, PrintStream ps)
             throws HyracksDataException {
-        return AObjectPrinterFactory.printFlatValue(typeTag, b, s, l, ps);
+        return AObjectPrinterFactory.createInstance(itemType, configuration).printFlatValue(typeTag, b, s, l, ps);
     }
 }
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/CountNTypeComputer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/CountNTypeComputer.java
new file mode 100644
index 0000000..4178315
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/CountNTypeComputer.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.om.typecomputer.impl;
+
+import org.apache.asterix.om.typecomputer.base.AbstractResultTypeComputer;
+import org.apache.asterix.om.types.AUnionType;
+import org.apache.asterix.om.types.BuiltinType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+
+public class CountNTypeComputer extends AbstractResultTypeComputer {
+
+    public static final CountNTypeComputer INSTANCE = new CountNTypeComputer();
+
+    private CountNTypeComputer() {
+    }
+
+    protected IAType getResultType(ILogicalExpression expr, IAType... strippedInputTypes) throws AlgebricksException {
+        return AUnionType.createNullableType(BuiltinType.AINT64);
+    }
+}
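
Unlike COUNT, whose result is registered with AInt64TypeComputer (a non-null BIGINT), COUNTN is typed here as a nullable BIGINT via AUnionType.createNullableType, so consumers must be prepared for NULL results. A tiny sketch of the two result types, assuming the asterix-om jar on the classpath:

    import org.apache.asterix.om.types.AUnionType;
    import org.apache.asterix.om.types.BuiltinType;
    import org.apache.asterix.om.types.IAType;

    public class CountNResultTypeExample {
        public static void main(String[] args) {
            IAType countType = BuiltinType.AINT64; // COUNT: non-null bigint
            IAType countnType = AUnionType.createNullableType(BuiltinType.AINT64); // COUNTN: bigint?
            System.out.println(countType.getTypeName() + " vs " + countnType.getTypeName());
        }
    }
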
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/OpenRecordConstructorResultType.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/OpenRecordConstructorResultType.java
index 838f6f8..bc3ae60 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/OpenRecordConstructorResultType.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/OpenRecordConstructorResultType.java
@@ -24,9 +24,11 @@
 import java.util.Iterator;
 import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 
 import org.apache.asterix.common.annotations.RecordFieldOrderAnnotation;
+import org.apache.asterix.common.config.CompilerProperties;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.om.typecomputer.base.IResultTypeComputer;
@@ -43,6 +45,7 @@
 import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
 import org.apache.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+import org.apache.hyracks.algebricks.core.config.AlgebricksConfig;
 import org.apache.hyracks.util.LogRedactionUtil;
 
 public class OpenRecordConstructorResultType implements IResultTypeComputer {
@@ -62,6 +65,13 @@
             return type;
         }
 
+        boolean orderFields = AlgebricksConfig.ORDERED_FIELDS;
+        if (metadataProvider != null) {
+            Map<String, Object> config = metadataProvider.getConfig();
+            orderFields = Boolean.parseBoolean((String) config.getOrDefault(CompilerProperties.COMPILER_ORDERFIELDS_KEY,
+                    String.valueOf(orderFields)));
+        }
+
         Iterator<Mutable<ILogicalExpression>> argIter = f.getArguments().iterator();
         List<String> namesList = new ArrayList<>();
         List<IAType> typesList = new ArrayList<>();
@@ -102,7 +112,12 @@
         IAType[] fieldTypes = typesList.toArray(new IAType[0]);
         ARecordType resultType;
         if (isOpen && canProvideAdditionFieldInfo) {
-            resultType = new ARecordType(null, fieldNames, fieldTypes, isOpen, allPossibleAdditionalFieldNames);
+            if (orderFields) {
+                resultType = new ARecordType(null, fieldNames, fieldTypes, isOpen, allPossibleAdditionalFieldNames,
+                        allPossibleFieldNamesOrdered);
+            } else {
+                resultType = new ARecordType(null, fieldNames, fieldTypes, isOpen, allPossibleAdditionalFieldNames);
+            }
             resultType.getAnnotations().add(new RecordFieldOrderAnnotation(allPossibleFieldNamesOrdered));
         } else {
             resultType = new ARecordType(null, fieldNames, fieldTypes, isOpen);
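
The new block above resolves the field-ordering switch from the compiler configuration, falling back to the Algebricks default when no override is set. A hedged sketch of the same getOrDefault idiom in isolation; the string value of COMPILER_ORDERFIELDS_KEY is not shown in this patch, so "compiler.orderfields" below is an assumption:

```java
import java.util.HashMap;
import java.util.Map;

public class OptionLookupSketch {
    public static void main(String[] args) {
        Map<String, Object> config = new HashMap<>();
        boolean defaultValue = false;
        // "compiler.orderfields" is a guessed key name; the real constant is
        // CompilerProperties.COMPILER_ORDERFIELDS_KEY. With the key absent,
        // parseBoolean runs on the stringified default and returns it unchanged.
        boolean orderFields = Boolean.parseBoolean(
                (String) config.getOrDefault("compiler.orderfields", String.valueOf(defaultValue)));
        System.out.println(orderFields); // false until the option is set to "true"
    }
}
```
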
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java
index fd51433..ba352ac 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java
@@ -23,6 +23,7 @@
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -54,10 +55,15 @@
 
     private static final long serialVersionUID = 1L;
     private static final JavaType SET = OBJECT_MAPPER.getTypeFactory().constructCollectionType(Set.class, String.class);
+
+    private static final JavaType ORDERED_SET =
+            OBJECT_MAPPER.getTypeFactory().constructCollectionType(LinkedHashSet.class, String.class);
     private static final String IS_OPEN = "isOpen";
     private static final String FIELD_NAMES = "fieldNames";
     private static final String FIELD_TYPES = "fieldTypes";
     private static final String ADDITIONAL_FIELDS = "additionalFieldNames";
+
+    private static final String ORDERED_FIELDS = "orderedFields";
     private final String[] fieldNames;
     private final IAType[] fieldTypes;
     private final Map<String, Integer> fieldNameToIndexMap = new HashMap<>();
@@ -71,6 +77,8 @@
     // the bounded set of all possible additional field names.
     private final Set<String> allPossibleAdditionalFieldNames;
 
+    private final LinkedHashSet<String> allOrderedFields;
+
     /**
      * @param typeName   the name of the type
      * @param fieldNames the names of the closed fields
@@ -78,7 +86,31 @@
      * @param isOpen     whether the record is open
      */
     public ARecordType(String typeName, String[] fieldNames, IAType[] fieldTypes, boolean isOpen) {
-        this(typeName, fieldNames, fieldTypes, isOpen, null);
+        this(typeName, fieldNames, fieldTypes, isOpen, null, null);
+    }
+
+    /**
+     * @param typeName   the name of the type
+     * @param fieldNames the names of the closed fields
+     * @param fieldTypes the types of the closed fields
+     * @param isOpen     whether the record is open
+     * @param allPossibleAdditionalFieldNames all possible additional field names.
+     */
+    public ARecordType(String typeName, String[] fieldNames, IAType[] fieldTypes, boolean isOpen,
+            Set<String> allPossibleAdditionalFieldNames) {
+        this(typeName, fieldNames, fieldTypes, isOpen, allPossibleAdditionalFieldNames, null);
+    }
+
+    /**
+     * @param typeName   the name of the type
+     * @param fieldNames the names of the closed fields
+     * @param fieldTypes the types of the closed fields
+     * @param isOpen     whether the record is open
+     * @param allOrderedFields all field names in their original order.
+     */
+    public ARecordType(String typeName, String[] fieldNames, IAType[] fieldTypes, boolean isOpen,
+            LinkedHashSet<String> allOrderedFields) {
+        this(typeName, fieldNames, fieldTypes, isOpen, null, allOrderedFields);
     }
 
     /**
@@ -86,10 +118,11 @@
      * @param fieldNames                       the names of the closed fields
      * @param fieldTypes                       the types of the closed fields
      * @param isOpen                           whether the record is open
-     * @param allPossibleAdditionalFieldNames, all possible additional field names.
+     * @param allPossibleAdditionalFieldNames all possible additional field names
+     * @param allOrderedFields all field names in their original order.
      */
     public ARecordType(String typeName, String[] fieldNames, IAType[] fieldTypes, boolean isOpen,
-            Set<String> allPossibleAdditionalFieldNames) {
+            Set<String> allPossibleAdditionalFieldNames, LinkedHashSet<String> allOrderedFields) {
         super(typeName);
         this.fieldNames = fieldNames;
         this.fieldTypes = fieldTypes;
@@ -100,6 +133,7 @@
             fieldNameToIndexMap.put(fieldNames[index], index);
         }
         this.allPossibleAdditionalFieldNames = allPossibleAdditionalFieldNames;
+        this.allOrderedFields = allOrderedFields;
     }
 
     public boolean canContainField(String fieldName) {
@@ -383,6 +417,7 @@
         jsonObject.put(IS_OPEN, isOpen);
         jsonObject.putPOJO(FIELD_NAMES, fieldNames);
         jsonObject.putPOJO(ADDITIONAL_FIELDS, allPossibleAdditionalFieldNames);
+        jsonObject.putPOJO(ORDERED_FIELDS, allOrderedFields);
         ArrayNode fieldTypesArray = OBJECT_MAPPER.createArrayNode();
         for (int i = 0; i < fieldTypes.length; i++) {
             fieldTypesArray.add(fieldTypes[i].toJson(registry));
@@ -397,12 +432,17 @@
         boolean isOpen = json.get(IS_OPEN).asBoolean();
         String[] fieldNames = OBJECT_MAPPER.convertValue(json.get(FIELD_NAMES), String[].class);
         Set<String> additionalFields = OBJECT_MAPPER.convertValue(json.get(ADDITIONAL_FIELDS), SET);
+        LinkedHashSet<String> orderedFields = OBJECT_MAPPER.convertValue(json.get(ORDERED_FIELDS), ORDERED_SET);
         ArrayNode fieldTypesNode = (ArrayNode) json.get(FIELD_TYPES);
         IAType[] fieldTypes = new IAType[fieldTypesNode.size()];
         for (int i = 0; i < fieldTypesNode.size(); i++) {
             fieldTypes[i] = (IAType) registry.deserialize(fieldTypesNode.get(i));
         }
-        return new ARecordType(typeName, fieldNames, fieldTypes, isOpen, additionalFields);
+        return new ARecordType(typeName, fieldNames, fieldTypes, isOpen, additionalFields, orderedFields);
+    }
+
+    public LinkedHashSet<String> getAllOrderedFields() {
+        return allOrderedFields;
     }
 
     public List<IAType> getFieldTypes(List<List<String>> fields) throws AlgebricksException {
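
The ordered field names travel as a LinkedHashSet precisely because it keeps insertion order across the JSON serialization round-trip shown above. A quick illustration of that property, plain JDK only:

```java
import java.util.LinkedHashSet;

public class OrderedFieldsSketch {
    public static void main(String[] args) {
        LinkedHashSet<String> ordered = new LinkedHashSet<>();
        ordered.add("id");
        ordered.add("name");
        ordered.add("age");
        // Unlike HashSet, iteration follows insertion order: id, name, age.
        System.out.println(String.join(", ", ordered));
    }
}
```
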
diff --git a/asterixdb/asterix-runtime/pom.xml b/asterixdb/asterix-runtime/pom.xml
index 138c4dd..b466d0d 100644
--- a/asterixdb/asterix-runtime/pom.xml
+++ b/asterixdb/asterix-runtime/pom.xml
@@ -151,8 +151,12 @@
       <artifactId>commons-collections4</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.esri.geometry</groupId>
-      <artifactId>esri-geometry-api</artifactId>
+      <groupId>org.locationtech.jts</groupId>
+      <artifactId>jts-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.parquet</groupId>
+      <artifactId>parquet-column</artifactId>
     </dependency>
   </dependencies>
 </project>
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarCountNAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarCountNAggregateDescriptor.java
new file mode 100644
index 0000000..fb2fbc8
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarCountNAggregateDescriptor.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.scalar;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.std.CountNAggregateDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class ScalarCountNAggregateDescriptor extends AbstractScalarAggregateDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final FunctionIdentifier FID = BuiltinFunctions.SCALAR_COUNTN;
+
+    public static final IFunctionDescriptorFactory FACTORY = ScalarCountNAggregateDescriptor::new;
+
+    private ScalarCountNAggregateDescriptor() {
+        super(CountNAggregateDescriptor.FACTORY);
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarCountNDistinctAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarCountNDistinctAggregateDescriptor.java
new file mode 100644
index 0000000..6f4cf3a
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarCountNDistinctAggregateDescriptor.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.runtime.aggregates.scalar;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.std.CountNAggregateDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class ScalarCountNDistinctAggregateDescriptor extends AbstractScalarDistinctAggregateDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final FunctionIdentifier FID = BuiltinFunctions.SCALAR_COUNTN_DISTINCT;
+
+    public static final IFunctionDescriptorFactory FACTORY =
+            createDescriptorFactory(ScalarCountNDistinctAggregateDescriptor::new);
+
+    private ScalarCountNDistinctAggregateDescriptor() {
+        super(CountNAggregateDescriptor.FACTORY);
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarSqlCountNAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarSqlCountNAggregateDescriptor.java
new file mode 100644
index 0000000..c0925d4
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarSqlCountNAggregateDescriptor.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.scalar;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.std.SqlCountNAggregateDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class ScalarSqlCountNAggregateDescriptor extends AbstractScalarAggregateDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final FunctionIdentifier FID = BuiltinFunctions.SCALAR_SQL_COUNTN;
+
+    public static final IFunctionDescriptorFactory FACTORY = ScalarSqlCountNAggregateDescriptor::new;
+
+    private ScalarSqlCountNAggregateDescriptor() {
+        super(SqlCountNAggregateDescriptor.FACTORY);
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarSqlCountNDistinctAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarSqlCountNDistinctAggregateDescriptor.java
new file mode 100644
index 0000000..a163b8f
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarSqlCountNDistinctAggregateDescriptor.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.runtime.aggregates.scalar;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.std.SqlCountNAggregateDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class ScalarSqlCountNDistinctAggregateDescriptor extends AbstractScalarDistinctAggregateDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final FunctionIdentifier FID = BuiltinFunctions.SCALAR_SQL_COUNTN_DISTINCT;
+
+    public static final IFunctionDescriptorFactory FACTORY =
+            createDescriptorFactory(ScalarSqlCountNDistinctAggregateDescriptor::new);
+
+    private ScalarSqlCountNDistinctAggregateDescriptor() {
+        super(SqlCountNAggregateDescriptor.FACTORY);
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableCountAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableCountAggregateFunction.java
index 03dacd2..1e611b9 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableCountAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableCountAggregateFunction.java
@@ -81,7 +81,7 @@
         if (typeTag == ATypeTag.MISSING || typeTag == ATypeTag.NULL) {
             processNull(state, start);
         } else {
-            cnt++;
+            cnt = processValue(typeTag, cnt);
         }
         BufferSerDeUtil.writeBoolean(metNull, state, start + MET_NULL_OFFSET);
         BufferSerDeUtil.writeLong(cnt, state, start + COUNT_OFFSET);
@@ -111,4 +111,8 @@
     protected void processNull(byte[] state, int start) {
         BufferSerDeUtil.writeBoolean(true, state, start + MET_NULL_OFFSET);
     }
+
+    protected long processValue(ATypeTag tag, long cnt) {
+        return cnt + 1;
+    }
 }
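
The serializable variant keeps its running count inside a raw byte[] state; the new processValue hook returns the updated count, which the caller writes back at COUNT_OFFSET. A small sketch of that read-modify-write cycle; java.nio.ByteBuffer stands in for BufferSerDeUtil, and the offset is illustrative:

```java
import java.nio.ByteBuffer;

public class StateBytesSketch {
    public static void main(String[] args) {
        byte[] state = new byte[16];
        int countOffset = 8; // illustrative; the real class uses COUNT_OFFSET
        // Read the current count, apply the hook's increment, write it back.
        long cnt = ByteBuffer.wrap(state).getLong(countOffset);
        cnt = cnt + 1; // what the default processValue(tag, cnt) returns
        ByteBuffer.wrap(state).putLong(countOffset, cnt);
        System.out.println(ByteBuffer.wrap(state).getLong(countOffset)); // 1
    }
}
```
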
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableCountNAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableCountNAggregateDescriptor.java
new file mode 100644
index 0000000..113c514
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableCountNAggregateDescriptor.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.serializable.std;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.base.AbstractSerializableAggregateFunctionDynamicDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.algebricks.runtime.base.ISerializedAggregateEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.ISerializedAggregateEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Counts the number of numeric items. Returns NULL if MISSING or NULL is encountered.
+ */
+public class SerializableCountNAggregateDescriptor extends AbstractSerializableAggregateFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public static final IFunctionDescriptorFactory FACTORY = SerializableCountNAggregateDescriptor::new;
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return BuiltinFunctions.SERIAL_COUNTN;
+    }
+
+    @Override
+    public ISerializedAggregateEvaluatorFactory createSerializableAggregateEvaluatorFactory(
+            final IScalarEvaluatorFactory[] args) {
+        return new ISerializedAggregateEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ISerializedAggregateEvaluator createAggregateEvaluator(IEvaluatorContext ctx)
+                    throws HyracksDataException {
+                return new SerializableCountNAggregateFunction(args, ctx, sourceLoc);
+            }
+        };
+    }
+
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableCountNAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableCountNAggregateFunction.java
new file mode 100644
index 0000000..020d367
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableCountNAggregateFunction.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.serializable.std;
+
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.SourceLocation;
+
+/**
+ * COUNTN returns the number of numeric items in the given list. Returns NULL if MISSING or NULL is encountered.
+ */
+public class SerializableCountNAggregateFunction extends AbstractSerializableCountAggregateFunction {
+    public SerializableCountNAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context,
+            SourceLocation sourceLoc) throws HyracksDataException {
+        super(args, context, sourceLoc);
+    }
+
+    @Override
+    protected long processValue(ATypeTag typeTag, long cnt) {
+        if (ATypeHierarchy.getTypeDomain(typeTag) == ATypeHierarchy.Domain.NUMERIC) {
+            return cnt + 1;
+        }
+        return cnt;
+    }
+}
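
Both COUNTN variants hinge on the same type-domain test used in processValue above. A hedged sketch of that check over a few tags, assuming the usual ATypeTag constants; only tags in the NUMERIC domain are counted:

```java
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.hierachy.ATypeHierarchy;

public class NumericDomainSketch {
    public static void main(String[] args) {
        long cnt = 0;
        // BIGINT and DOUBLE sit in the NUMERIC domain; STRING does not.
        for (ATypeTag tag : new ATypeTag[] { ATypeTag.BIGINT, ATypeTag.STRING, ATypeTag.DOUBLE }) {
            if (ATypeHierarchy.getTypeDomain(tag) == ATypeHierarchy.Domain.NUMERIC) {
                cnt++;
            }
        }
        System.out.println(cnt); // 2
    }
}
```
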
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlCountNAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlCountNAggregateDescriptor.java
new file mode 100644
index 0000000..38c5150
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlCountNAggregateDescriptor.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.serializable.std;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.base.AbstractSerializableAggregateFunctionDynamicDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.algebricks.runtime.base.ISerializedAggregateEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.ISerializedAggregateEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Counts the number of numeric items. NULLs and MISSINGs are ignored.
+ */
+public class SerializableSqlCountNAggregateDescriptor extends AbstractSerializableAggregateFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public static final IFunctionDescriptorFactory FACTORY = SerializableSqlCountNAggregateDescriptor::new;
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return BuiltinFunctions.SERIAL_SQL_COUNTN;
+    }
+
+    @Override
+    public ISerializedAggregateEvaluatorFactory createSerializableAggregateEvaluatorFactory(
+            final IScalarEvaluatorFactory[] args) {
+        return new ISerializedAggregateEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ISerializedAggregateEvaluator createAggregateEvaluator(IEvaluatorContext ctx)
+                    throws HyracksDataException {
+                return new SerializableSqlCountNAggregateFunction(args, ctx, sourceLoc);
+            }
+        };
+    }
+
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlCountNAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlCountNAggregateFunction.java
new file mode 100644
index 0000000..41a1df3
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlCountNAggregateFunction.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.serializable.std;
+
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.SourceLocation;
+
+/**
+ * COUNTN returns the number of numeric items in the given list. NULLs and MISSINGs are ignored.
+ */
+public class SerializableSqlCountNAggregateFunction extends AbstractSerializableCountAggregateFunction {
+    public SerializableSqlCountNAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context,
+            SourceLocation sourceLoc) throws HyracksDataException {
+        super(args, context, sourceLoc);
+    }
+
+    @Override
+    protected void processNull(byte[] state, int start) {
+    }
+
+    @Override
+    protected long processValue(ATypeTag typeTag, long cnt) {
+        if (ATypeHierarchy.getTypeDomain(typeTag) == ATypeHierarchy.Domain.NUMERIC) {
+            return cnt + 1;
+        }
+        return cnt;
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractCountAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractCountAggregateFunction.java
index 5756f00..1296d82 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractCountAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractCountAggregateFunction.java
@@ -23,6 +23,7 @@
 import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
 import org.apache.asterix.om.base.AInt64;
 import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.asterix.om.base.ANull;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.EnumDeserializer;
@@ -45,9 +46,13 @@
     @SuppressWarnings("unchecked")
     private ISerializerDeserializer<AInt64> int64Serde =
             SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
+    @SuppressWarnings("unchecked")
+    private ISerializerDeserializer<ANull> nullSerde =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);
     private IPointable inputVal = new VoidPointable();
     private IScalarEvaluator eval;
     protected long cnt;
+    protected boolean nullRes;
 
     private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
 
@@ -60,6 +65,7 @@
     @Override
     public void init() throws HyracksDataException {
         cnt = 0;
+        nullRes = false;
     }
 
     @Override
@@ -71,7 +77,7 @@
         if (typeTag == ATypeTag.NULL || typeTag == ATypeTag.MISSING) {
             processNull();
         } else if (typeTag != ATypeTag.SYSTEM_NULL) {
-            cnt++;
+            processValue(typeTag);
         }
     }
 
@@ -79,8 +85,12 @@
     public void finish(IPointable resultPointable) throws HyracksDataException {
         resultStorage.reset();
         try {
-            result.setValue(cnt);
-            int64Serde.serialize(result, resultStorage.getDataOutput());
+            if (nullRes) {
+                nullSerde.serialize(ANull.NULL, resultStorage.getDataOutput());
+            } else {
+                result.setValue(cnt);
+                int64Serde.serialize(result, resultStorage.getDataOutput());
+            }
         } catch (IOException e) {
             throw HyracksDataException.create(e);
         }
@@ -93,4 +103,8 @@
     }
 
     protected abstract void processNull();
+
+    protected void processValue(ATypeTag tag) {
+        cnt++;
+    }
 }
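
With processNull and processValue factored out, the counting variants in this patch differ only in how they override the two hooks. A comment-style summary of the two new functions, as described by their javadocs:

```java
// Variant      processNull()          processValue(tag)
// COUNTN       nullRes = true         cnt++ only for NUMERIC-domain tags
// SQL-COUNTN   no-op (NULL ignored)   cnt++ only for NUMERIC-domain tags
```
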
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/CountNAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/CountNAggregateDescriptor.java
new file mode 100644
index 0000000..97cd1ad
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/CountNAggregateDescriptor.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.std;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.base.AbstractAggregateFunctionDynamicDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluatorFactory;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Counts the number of numeric items. Returns NULL if MISSING or NULL is encountered.
+ */
+public class CountNAggregateDescriptor extends AbstractAggregateFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public static final IFunctionDescriptorFactory FACTORY = CountNAggregateDescriptor::new;
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return BuiltinFunctions.COUNTN;
+    }
+
+    @Override
+    public IAggregateEvaluatorFactory createAggregateEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
+        return new IAggregateEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public IAggregateEvaluator createAggregateEvaluator(IEvaluatorContext ctx) throws HyracksDataException {
+                return new CountNAggregateFunction(args, ctx, sourceLoc);
+            }
+        };
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/CountNAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/CountNAggregateFunction.java
new file mode 100644
index 0000000..c2bcb5f
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/CountNAggregateFunction.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.std;
+
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.SourceLocation;
+
+/**
+ * COUNTN returns the number of numeric items in the given list. Returns NULL if MISSING or NULL is encountered.
+ */
+public class CountNAggregateFunction extends AbstractCountAggregateFunction {
+
+    public CountNAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context, SourceLocation sourceLoc)
+            throws HyracksDataException {
+        super(args, context, sourceLoc);
+    }
+
+    @Override
+    protected void processNull() {
+        nullRes = true;
+    }
+
+    @Override
+    protected void processValue(ATypeTag tag) {
+        if (ATypeHierarchy.getTypeDomain(tag) == ATypeHierarchy.Domain.NUMERIC) {
+            cnt++;
+        }
+    }
+}
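
Putting the two overrides together, COUNTN's observable behavior should follow its javadoc. Illustrative expectations, not test cases taken from this patch:

```java
// countn([1, 2, "three", 4]) -> 3     ("three" is not numeric, so it is skipped)
// countn([1, 2, null])       -> null  (processNull sets nullRes)
// countn([1, 2, missing])    -> null  (MISSING is handled like NULL here)
```
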
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlCountNAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlCountNAggregateDescriptor.java
new file mode 100644
index 0000000..d68d0bb
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlCountNAggregateDescriptor.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.std;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.base.AbstractAggregateFunctionDynamicDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluatorFactory;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Counts the number of numeric items. NULLs and MISSINGs are ignored.
+ */
+public class SqlCountNAggregateDescriptor extends AbstractAggregateFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public static final IFunctionDescriptorFactory FACTORY = SqlCountNAggregateDescriptor::new;
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return BuiltinFunctions.SQL_COUNTN;
+    }
+
+    @Override
+    public IAggregateEvaluatorFactory createAggregateEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
+        return new IAggregateEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public IAggregateEvaluator createAggregateEvaluator(IEvaluatorContext ctx) throws HyracksDataException {
+                return new SqlCountNAggregateFunction(args, ctx, sourceLoc);
+            }
+        };
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlCountNAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlCountNAggregateFunction.java
new file mode 100644
index 0000000..db677ab
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlCountNAggregateFunction.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.std;
+
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.SourceLocation;
+
+/**
+ * COUNTN returns the number of numeric items in the given list. NULLs and MISSINGs are ignored.
+ */
+public class SqlCountNAggregateFunction extends AbstractCountAggregateFunction {
+
+    public SqlCountNAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context,
+            SourceLocation sourceLoc) throws HyracksDataException {
+        super(args, context, sourceLoc);
+    }
+
+    @Override
+    protected void processNull() {
+    }
+
+    @Override
+    protected void processValue(ATypeTag tag) {
+        if (ATypeHierarchy.getTypeDomain(tag) == ATypeHierarchy.Domain.NUMERIC) {
+            cnt++;
+        }
+    }
+}
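
The SQL flavor differs only in its empty processNull, so NULL and MISSING are skipped rather than poisoning the result. Illustrative expectations, mirroring the COUNTN sketch above:

```java
// sql-countn([1, 2, "three", 4]) -> 3  (non-numeric items skipped)
// sql-countn([1, 2, null])       -> 2  (NULL ignored instead of returned)
// sql-countn([null, missing])    -> 0  (nothing numeric to count)
```
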
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
index 4f67fba..5615e14 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
@@ -45,8 +45,8 @@
 import org.apache.hyracks.data.std.primitive.VoidPointable;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.Envelope;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
 
 public class CreateMBREvalFactory implements IScalarEvaluatorFactory {
 
@@ -257,25 +257,25 @@
                                 }
                                 break;
                             case GEOMETRY:
-                                Envelope record = new Envelope();
-                                AGeometrySerializerDeserializer.getAGeometryObject(data0, startOffset0 + 1)
-                                        .getGeometry().getEsriGeometry().queryEnvelope(record);
+                                Geometry geometry = AGeometrySerializerDeserializer
+                                        .getAGeometryObject(data0, startOffset0 + 1).getGeometry();
+                                Envelope envelope = geometry.getEnvelopeInternal();
                                 switch (coordinate) {
                                     case 0:
-                                        value = record.getXMin();
+                                        value = envelope.getMinX();
                                         break;
                                     case 1:
-                                        value = record.getYMin();
+                                        value = envelope.getMinY();
                                         break;
                                     case 2:
-                                        value = record.getXMax();
+                                        value = envelope.getMaxX();
                                         break;
                                     case 3:
-                                        value = record.getYMax();
+                                        value = envelope.getMaxY();
                                         break;
                                     default:
-                                        throw new NotImplementedException(
-                                                coordinate + "is not a valid coordinate option");
+                                        throw new IllegalArgumentException(
+                                                coordinate + " is not a valid coordinate option");
                                 }
                                 break;
                             case CIRCLE:
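
The GEOMETRY branch now reads the bounding box from JTS's internal envelope instead of querying an ESRI Envelope. A self-contained sketch of the replacement API, using standard jts-core calls that match the dependency added in the pom above:

```java
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;

public class JtsEnvelopeSketch {
    public static void main(String[] args) {
        Geometry line = new GeometryFactory().createLineString(
                new Coordinate[] { new Coordinate(1.0, 4.0), new Coordinate(3.0, 2.0) });
        // getEnvelopeInternal() hands back the geometry's bounding box directly.
        Envelope env = line.getEnvelopeInternal();
        // The same four MBR coordinates the evaluator reads above: 1.0 2.0 3.0 4.0
        System.out.println(env.getMinX() + " " + env.getMinY() + " " + env.getMaxX() + " " + env.getMaxY());
    }
}
```
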
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/functions/FunctionCollection.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/functions/FunctionCollection.java
index 86a0790..d682d7a 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/functions/FunctionCollection.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/functions/FunctionCollection.java
@@ -37,6 +37,8 @@
 import org.apache.asterix.runtime.aggregates.scalar.ScalarAvgDistinctAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.scalar.ScalarCountAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.scalar.ScalarCountDistinctAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.scalar.ScalarCountNAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.scalar.ScalarCountNDistinctAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.scalar.ScalarFirstElementAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.scalar.ScalarKurtosisAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.scalar.ScalarKurtosisDistinctAggregateDescriptor;
@@ -50,6 +52,8 @@
 import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlAvgDistinctAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlCountAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlCountDistinctAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlCountNAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlCountNDistinctAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlKurtosisAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlKurtosisDistinctAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlMaxAggregateDescriptor;
@@ -83,6 +87,7 @@
 import org.apache.asterix.runtime.aggregates.scalar.ScalarVarPopDistinctAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.serializable.std.SerializableAvgAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.serializable.std.SerializableCountAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.serializable.std.SerializableCountNAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalAvgAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalKurtosisAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalSkewnessAggregateDescriptor;
@@ -135,6 +140,7 @@
 import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSkewnessAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlAvgAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlCountAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlCountNAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlKurtosisAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlSkewnessAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlStddevAggregateDescriptor;
@@ -149,6 +155,7 @@
 import org.apache.asterix.runtime.aggregates.serializable.std.SerializableVarPopAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.std.AvgAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.std.CountAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.std.CountNAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.std.GlobalAvgAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.std.GlobalKurtosisAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.std.GlobalMaxAggregateDescriptor;
@@ -226,6 +233,7 @@
 import org.apache.asterix.runtime.aggregates.std.SkewnessAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.std.SqlAvgAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.std.SqlCountAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.std.SqlCountNAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.std.SqlKurtosisAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.std.SqlMaxAggregateDescriptor;
 import org.apache.asterix.runtime.aggregates.std.SqlMedianAggregateDescriptor;
@@ -698,6 +706,7 @@
         // aggregate functions
         fc.add(ListifyAggregateDescriptor.FACTORY);
         fc.add(CountAggregateDescriptor.FACTORY);
+        fc.add(CountNAggregateDescriptor.FACTORY);
         fc.add(AvgAggregateDescriptor.FACTORY);
         fc.add(LocalAvgAggregateDescriptor.FACTORY);
         fc.add(IntermediateAvgAggregateDescriptor.FACTORY);
@@ -753,6 +762,7 @@
 
         // serializable aggregates
         fc.add(SerializableCountAggregateDescriptor.FACTORY);
+        fc.add(SerializableCountNAggregateDescriptor.FACTORY);
         fc.add(SerializableAvgAggregateDescriptor.FACTORY);
         fc.add(SerializableLocalAvgAggregateDescriptor.FACTORY);
         fc.add(SerializableIntermediateAvgAggregateDescriptor.FACTORY);
@@ -790,7 +800,9 @@
         fc.add(ScalarArrayAggAggregateDescriptor.FACTORY);
         fc.add(ScalarArrayAggDistinctAggregateDescriptor.FACTORY);
         fc.add(ScalarCountAggregateDescriptor.FACTORY);
+        fc.add(ScalarCountNAggregateDescriptor.FACTORY);
         fc.add(ScalarCountDistinctAggregateDescriptor.FACTORY);
+        fc.add(ScalarCountNDistinctAggregateDescriptor.FACTORY);
         fc.add(ScalarAvgAggregateDescriptor.FACTORY);
         fc.add(ScalarAvgDistinctAggregateDescriptor.FACTORY);
         fc.add(ScalarSumAggregateDescriptor.FACTORY);
@@ -816,6 +828,7 @@
 
         // SQL aggregates
         fc.add(SqlCountAggregateDescriptor.FACTORY);
+        fc.add(SqlCountNAggregateDescriptor.FACTORY);
         fc.add(SqlAvgAggregateDescriptor.FACTORY);
         fc.add(LocalSqlAvgAggregateDescriptor.FACTORY);
         fc.add(IntermediateSqlAvgAggregateDescriptor.FACTORY);
@@ -867,6 +880,7 @@
 
         // SQL serializable aggregates
         fc.add(SerializableSqlCountAggregateDescriptor.FACTORY);
+        fc.add(SerializableSqlCountNAggregateDescriptor.FACTORY);
         fc.add(SerializableSqlAvgAggregateDescriptor.FACTORY);
         fc.add(SerializableLocalSqlAvgAggregateDescriptor.FACTORY);
         fc.add(SerializableIntermediateSqlAvgAggregateDescriptor.FACTORY);
@@ -903,6 +917,8 @@
         // SQL scalar aggregates
         fc.add(ScalarSqlCountAggregateDescriptor.FACTORY);
         fc.add(ScalarSqlCountDistinctAggregateDescriptor.FACTORY);
+        fc.add(ScalarSqlCountNAggregateDescriptor.FACTORY);
+        fc.add(ScalarSqlCountNDistinctAggregateDescriptor.FACTORY);
         fc.add(ScalarSqlAvgAggregateDescriptor.FACTORY);
         fc.add(ScalarSqlAvgDistinctAggregateDescriptor.FACTORY);
         fc.add(ScalarSqlSumAggregateDescriptor.FACTORY);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/DynamicPathResolver.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/DynamicPathResolver.java
index c6d1dbe..e3ccc33 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/DynamicPathResolver.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/DynamicPathResolver.java
@@ -32,7 +32,7 @@
 import org.apache.hyracks.util.string.UTF8CharBuffer;
 import org.apache.hyracks.util.string.UTF8StringUtil;
 
-final class DynamicPathResolver extends AbstractPathResolver {
+public final class DynamicPathResolver extends AbstractPathResolver {
     private final IScalarEvaluator pathEval;
     private final IWarningCollector warningCollector;
     private final StringBuilder dirStringBuilder;
@@ -40,7 +40,7 @@
     private final UTF8CharBuffer charBuffer;
     private final SourceLocation pathSourceLocation;
 
-    DynamicPathResolver(String fileExtension, char fileSeparator, int partition, IScalarEvaluator pathEval,
+    public DynamicPathResolver(String fileExtension, char fileSeparator, int partition, IScalarEvaluator pathEval,
             IWarningCollector warningCollector, SourceLocation pathSourceLocation) {
         super(fileExtension, fileSeparator, partition);
         this.pathEval = pathEval;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/ExternalFileWriter.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/ExternalFileWriter.java
index 95dc962..e8bae00 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/ExternalFileWriter.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/ExternalFileWriter.java
@@ -23,7 +23,7 @@
 import org.apache.hyracks.data.std.api.IValueReference;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
 
-final class ExternalFileWriter implements IExternalWriter {
+public final class ExternalFileWriter implements IExternalWriter {
     static final String UNRESOLVABLE_PATH = "UNRESOLVABLE_PATH";
     private final IPathResolver pathResolver;
     private final IExternalFileWriter writer;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/ExternalFileWriterFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/ExternalFileWriterFactory.java
index 5981584..f85a8ab 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/ExternalFileWriterFactory.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/ExternalFileWriterFactory.java
@@ -18,52 +18,29 @@
  */
 package org.apache.asterix.runtime.writer;
 
-import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
-import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
-import org.apache.hyracks.algebricks.runtime.evaluators.EvaluatorContext;
 import org.apache.hyracks.algebricks.runtime.writers.IExternalWriter;
 import org.apache.hyracks.algebricks.runtime.writers.IExternalWriterFactory;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.IWarningCollector;
-import org.apache.hyracks.api.exceptions.SourceLocation;
 
 public class ExternalFileWriterFactory implements IExternalWriterFactory {
     private static final long serialVersionUID = 1412969574113419638L;
     private final IExternalFileWriterFactory writerFactory;
     private final IExternalPrinterFactory printerFactory;
-    private final String fileExtension;
     private final int maxResult;
-    private final IScalarEvaluatorFactory pathEvalFactory;
-    private final String staticPath;
-    private final SourceLocation pathSourceLocation;
+    private final IPathResolverFactory pathResolverFactory;
 
     public ExternalFileWriterFactory(IExternalFileWriterFactory writerFactory, IExternalPrinterFactory printerFactory,
-            String fileExtension, int maxResult, IScalarEvaluatorFactory pathEvalFactory, String staticPath,
-            SourceLocation pathSourceLocation) {
+            IPathResolverFactory pathResolverFactory, int maxResult) {
         this.writerFactory = writerFactory;
         this.printerFactory = printerFactory;
-        this.fileExtension = fileExtension;
+        this.pathResolverFactory = pathResolverFactory;
         this.maxResult = maxResult;
-        this.pathEvalFactory = pathEvalFactory;
-        this.staticPath = staticPath;
-        this.pathSourceLocation = pathSourceLocation;
     }
 
     @Override
     public IExternalWriter createWriter(IHyracksTaskContext context) throws HyracksDataException {
-        int partition = context.getTaskAttemptId().getTaskId().getPartition();
-        char fileSeparator = writerFactory.getSeparator();
-        IPathResolver resolver;
-        if (staticPath == null) {
-            EvaluatorContext evaluatorContext = new EvaluatorContext(context);
-            IScalarEvaluator pathEval = pathEvalFactory.createScalarEvaluator(evaluatorContext);
-            IWarningCollector warningCollector = context.getWarningCollector();
-            resolver = new DynamicPathResolver(fileExtension, fileSeparator, partition, pathEval, warningCollector,
-                    pathSourceLocation);
-        } else {
-            resolver = new StaticPathResolver(fileExtension, fileSeparator, partition, staticPath);
-        }
+        IPathResolver resolver = pathResolverFactory.createResolver(context);
         IExternalFileWriter writer = writerFactory.createWriter(context, printerFactory);
         return new ExternalFileWriter(resolver, writer, maxResult);
     }
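
The refactoring above moves all path-resolution state out of ExternalFileWriterFactory and behind IPathResolverFactory, so createWriter reduces to composing a resolver with a file writer. A minimal sketch of how the two factories are now wired together, assuming illustrative writer/printer/evaluator factories and source location supplied by the compiler:

    // Sketch only: fileWriterFactory, printerFactory, pathEvalFactory,
    // sourceLocation, and maxResult are placeholders for compiler-provided values.
    IPathResolverFactory resolverFactory = new PathResolverFactory(
            fileWriterFactory,   // supplies the path separator at task time
            ".json",             // illustrative file extension
            pathEvalFactory,     // used only when no static path is given
            null,                // staticPath == null -> dynamic resolution
            sourceLocation);
    IExternalWriterFactory writerFactory = new ExternalFileWriterFactory(
            fileWriterFactory, printerFactory, resolverFactory, maxResult);
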
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/IPathResolver.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/IPathResolver.java
index 35b4ddd..c4a3b04 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/IPathResolver.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/IPathResolver.java
@@ -24,7 +24,7 @@
 /**
  * Path resolver which generates paths for the written files
  */
-interface IPathResolver {
+public interface IPathResolver {
 
     /**
      * Extracts the partitioning values from the provided tuple and generates the file path
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/IPathResolverFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/IPathResolverFactory.java
new file mode 100644
index 0000000..18a8799
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/IPathResolverFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.writer;
+
+import java.io.Serializable;
+
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public interface IPathResolverFactory extends Serializable {
+
+    IPathResolver createResolver(IHyracksTaskContext ctx) throws HyracksDataException;
+
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/PathResolverFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/PathResolverFactory.java
new file mode 100644
index 0000000..8707cae
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/PathResolverFactory.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.writer;
+
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.algebricks.runtime.evaluators.EvaluatorContext;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.IWarningCollector;
+import org.apache.hyracks.api.exceptions.SourceLocation;
+
+public class PathResolverFactory implements IPathResolverFactory {
+    private static final long serialVersionUID = 8971234908711238L;
+    private final IExternalFileWriterFactory writerFactory;
+    private final String fileExtension;
+    private final IScalarEvaluatorFactory pathEvalFactory;
+    private final String staticPath;
+    private final SourceLocation pathSourceLocation;
+
+    public PathResolverFactory(IExternalFileWriterFactory writerFactory, String fileExtension,
+            IScalarEvaluatorFactory pathEvalFactory, String staticPath, SourceLocation pathSourceLocation) {
+        this.writerFactory = writerFactory;
+        this.fileExtension = fileExtension;
+        this.pathEvalFactory = pathEvalFactory;
+        this.pathSourceLocation = pathSourceLocation;
+        this.staticPath = staticPath;
+    }
+
+    @Override
+    public IPathResolver createResolver(IHyracksTaskContext context) throws HyracksDataException {
+        int partition = context.getTaskAttemptId().getTaskId().getPartition();
+        char fileSeparator = writerFactory.getSeparator();
+        IPathResolver resolver;
+        if (staticPath == null) {
+            EvaluatorContext evaluatorContext = new EvaluatorContext(context);
+            IScalarEvaluator pathEval = pathEvalFactory.createScalarEvaluator(evaluatorContext);
+            IWarningCollector warningCollector = context.getWarningCollector();
+            resolver = new DynamicPathResolver(fileExtension, fileSeparator, partition, pathEval, warningCollector,
+                    pathSourceLocation);
+        } else {
+            resolver = new StaticPathResolver(fileExtension, fileSeparator, partition, staticPath);
+        }
+        return resolver;
+    }
+}
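
Resolver selection hinges entirely on staticPath: null selects the evaluator-driven DynamicPathResolver, anything else the fixed-directory StaticPathResolver. A hedged illustration (the task context, factories, and paths are placeholders):

    // Fixed output directory: every partition writes under the same prefix.
    IPathResolver staticResolver = new PathResolverFactory(
            writerFactory, ".parquet", null, "out/results/", srcLoc)
            .createResolver(taskContext);     // -> StaticPathResolver
    // Per-tuple directory computed by the scalar evaluator.
    IPathResolver dynamicResolver = new PathResolverFactory(
            writerFactory, ".parquet", pathEvalFactory, null, srcLoc)
            .createResolver(taskContext);     // -> DynamicPathResolver
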
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/StaticPathResolver.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/StaticPathResolver.java
index 52943ed..c5fa0dd 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/StaticPathResolver.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/writer/StaticPathResolver.java
@@ -20,10 +20,10 @@
 
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
 
-final class StaticPathResolver extends AbstractPathResolver {
+public final class StaticPathResolver extends AbstractPathResolver {
     private final String directoryPath;
 
-    StaticPathResolver(String fileExtension, char fileSeparator, int partition, String directoryPath) {
+    public StaticPathResolver(String fileExtension, char fileSeparator, int partition, String directoryPath) {
         super(fileExtension, fileSeparator, partition);
 
         if (!directoryPath.isEmpty() && directoryPath.charAt(directoryPath.length() - 1) != fileSeparator) {
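
The now-public constructor keeps the old normalization: a directory path that does not already end with the writer's separator presumably gets one appended (the appending branch sits outside this hunk), so later filename concatenation is unconditional. For example, with illustrative arguments:

    // "out/2024" with separator '/' would be stored as "out/2024/";
    // "out/2024/" would be kept as-is.
    IPathResolver resolver = new StaticPathResolver(".json", '/', 0, "out/2024");
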
diff --git a/asterixdb/asterix-server/pom.xml b/asterixdb/asterix-server/pom.xml
index e65c742..82534fb 100644
--- a/asterixdb/asterix-server/pom.xml
+++ b/asterixdb/asterix-server/pom.xml
@@ -612,6 +612,22 @@
               <contentFile>protobuf_2.5.0_LICENSE.txt</contentFile>
             </license>
             <license>
+              <displayName>Eclipse Distribution License - v 1.0</displayName>
+              <url>https://www.eclipse.org/org/documents/edl-v10.php</url>
+              <aliasUrls>
+                <aliasUrl>https://github.com/locationtech/jts/blob/master/LICENSE_EDLv1.txt</aliasUrl>
+              </aliasUrls>
+              <metric>1050</metric>
+            </license>
+            <license>
+              <displayName>Eclipse Public License, Version 2.0</displayName>
+              <url>https://www.eclipse.org/legal/epl-2.0/</url>
+              <aliasUrls>
+                <aliasUrl>https://github.com/locationtech/jts/blob/master/LICENSE_EPLv2.txt</aliasUrl>
+              </aliasUrls>
+              <metric>1100</metric>
+            </license>
+            <license>
               <displayName>a BSD 3-clause license</displayName>
               <url>https://github.com/codehaus/paranamer-git/blob/paranamer-2.3/LICENSE.txt</url>
               <contentFile>paranamer-2.3_LICENSE.txt</contentFile>
diff --git a/asterixdb/pom.xml b/asterixdb/pom.xml
index cb01868..62bec6b 100644
--- a/asterixdb/pom.xml
+++ b/asterixdb/pom.xml
@@ -96,7 +96,7 @@
     <log4j.version>2.22.1</log4j.version>
     <awsjavasdk.version>2.24.9</awsjavasdk.version>
     <awsjavasdk.crt.version>0.29.10</awsjavasdk.crt.version>
-    <parquet.version>1.14.1</parquet.version>
+    <parquet.version>1.14.3</parquet.version>
     <hadoop-awsjavasdk.version>1.12.637</hadoop-awsjavasdk.version>
     <azureblobjavasdk.version>12.25.1</azureblobjavasdk.version>
     <azurecommonjavasdk.version>12.24.1</azurecommonjavasdk.version>
@@ -752,14 +752,6 @@
     </profile>
     <profile>
       <id>azurite-tests</id>
-      <activation>
-        <os>
-          <family>unix</family>
-        </os>
-        <property>
-          <name>!skipTests</name>
-        </property>
-      </activation>
       <properties>
         <azurite.npm.install.stage>process-classes</azurite.npm.install.stage>
         <azurite.install.stage>generate-test-resources</azurite.install.stage>
@@ -1554,9 +1546,9 @@
         <version>3.8.4</version>
       </dependency>
       <dependency>
-        <groupId>com.esri.geometry</groupId>
-        <artifactId>esri-geometry-api</artifactId>
-        <version>2.0.0</version>
+        <groupId>org.locationtech.jts</groupId>
+        <artifactId>jts-core</artifactId>
+        <version>1.19.0</version>
       </dependency>
       <dependency>
         <groupId>org.reflections</groupId>
diff --git a/asterixdb/src/main/licenses/content/com.microsoft.azure--msal4j--1.16.1_MIT_License.txt b/asterixdb/src/main/licenses/content/com.microsoft.azure--msal4j--1.16.1_MIT_License.txt
new file mode 100644
index 0000000..d1ca00f
--- /dev/null
+++ b/asterixdb/src/main/licenses/content/com.microsoft.azure--msal4j--1.16.1_MIT_License.txt
@@ -0,0 +1,21 @@
+    MIT License
+
+    Copyright (c) Microsoft Corporation. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE
\ No newline at end of file
diff --git a/asterixdb/src/main/licenses/content/com.microsoft.azure--msal4j-persistence-extension--1.3.0_MIT_License.txt b/asterixdb/src/main/licenses/content/com.microsoft.azure--msal4j-persistence-extension--1.3.0_MIT_License.txt
new file mode 100644
index 0000000..d1ca00f
--- /dev/null
+++ b/asterixdb/src/main/licenses/content/com.microsoft.azure--msal4j-persistence-extension--1.3.0_MIT_License.txt
@@ -0,0 +1,21 @@
+    MIT License
+
+    Copyright (c) Microsoft Corporation. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE
\ No newline at end of file
diff --git a/asterixdb/src/main/licenses/content/github.com_locationtech_jts_blob_master_LICENSE_EDLv1.txt b/asterixdb/src/main/licenses/content/github.com_locationtech_jts_blob_master_LICENSE_EDLv1.txt
new file mode 100644
index 0000000..1071fed
--- /dev/null
+++ b/asterixdb/src/main/licenses/content/github.com_locationtech_jts_blob_master_LICENSE_EDLv1.txt
@@ -0,0 +1,30 @@
+Eclipse Distribution License - v 1.0
+
+Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors.
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+  Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+  Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+  Neither the name of the Eclipse Foundation, Inc. nor the names of its
+  contributors may be used to endorse or promote products derived from this
+  software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/asterixdb/src/main/licenses/content/opensource.org_licenses_BSD-2-Clause.txt b/asterixdb/src/main/licenses/content/opensource.org_licenses_BSD-2-Clause.txt
new file mode 100644
index 0000000..226190a
--- /dev/null
+++ b/asterixdb/src/main/licenses/content/opensource.org_licenses_BSD-2-Clause.txt
@@ -0,0 +1,24 @@
+BSD 2-Clause License
+
+Copyright (c) 2024, Couchbase, Inc.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/asterixdb/src/main/licenses/templates/source_licenses.ftl b/asterixdb/src/main/licenses/templates/source_licenses.ftl
index 97b032b..95b6f3a 100644
--- a/asterixdb/src/main/licenses/templates/source_licenses.ftl
+++ b/asterixdb/src/main/licenses/templates/source_licenses.ftl
@@ -133,4 +133,196 @@
 </#if>
 <#if !asterixDashboardSkip!false>
     <#include "../../../../asterix-dashboard/src/main/licenses/dashboard-source-license.ftl">
-</#if>
\ No newline at end of file
+</#if>
+<@license component="AsterixDB OM" files=[
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/JtsModule.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiLineStringParser.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PointParser.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPointParser.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryParser.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPolygonParser.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/LineStringParser.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/BaseParser.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryCollectionParser.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GenericGeometryParser.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PolygonParser.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoFunctionUtils.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoJsonConstants.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometryDeserializer.java",
+                    "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometrySerializer.java"
+                ]>
+    Classes are modified to support the org.locationtech.jts package instead of com.vividsolutions.jts
+    Copyright BeDataDriven
+
+    Apache License
+    Version 2.0, January 2004
+    http://www.apache.org/licenses/
+    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+    1. Definitions.
+    "License" shall mean the terms and conditions for use, reproduction,
+    and distribution as defined by Sections 1 through 9 of this document.
+    "Licensor" shall mean the copyright owner or entity authorized by
+    the copyright owner that is granting the License.
+    "Legal Entity" shall mean the union of the acting entity and all
+    other entities that control, are controlled by, or are under common
+    control with that entity. For the purposes of this definition,
+    "control" means (i) the power, direct or indirect, to cause the
+    direction or management of such entity, whether by contract or
+    otherwise, or (ii) ownership of fifty percent (50%) or more of the
+    outstanding shares, or (iii) beneficial ownership of such entity.
+    "You" (or "Your") shall mean an individual or Legal Entity
+    exercising permissions granted by this License.
+    "Source" form shall mean the preferred form for making modifications,
+    including but not limited to software source code, documentation
+    source, and configuration files.
+    "Object" form shall mean any form resulting from mechanical
+    transformation or translation of a Source form, including but
+    not limited to compiled object code, generated documentation,
+    and conversions to other media types.
+    "Work" shall mean the work of authorship, whether in Source or
+    Object form, made available under the License, as indicated by a
+    copyright notice that is included in or attached to the work
+    (an example is provided in the Appendix below).
+    "Derivative Works" shall mean any work, whether in Source or Object
+    form, that is based on (or derived from) the Work and for which the
+    editorial revisions, annotations, elaborations, or other modifications
+    represent, as a whole, an original work of authorship. For the purposes
+    of this License, Derivative Works shall not include works that remain
+    separable from, or merely link (or bind by name) to the interfaces of,
+    the Work and Derivative Works thereof.
+    "Contribution" shall mean any work of authorship, including
+    the original version of the Work and any modifications or additions
+    to that Work or Derivative Works thereof, that is intentionally
+    submitted to Licensor for inclusion in the Work by the copyright owner
+    or by an individual or Legal Entity authorized to submit on behalf of
+    the copyright owner. For the purposes of this definition, "submitted"
+    means any form of electronic, verbal, or written communication sent
+    to the Licensor or its representatives, including but not limited to
+    communication on electronic mailing lists, source code control systems,
+    and issue tracking systems that are managed by, or on behalf of, the
+    Licensor for the purpose of discussing and improving the Work, but
+    excluding communication that is conspicuously marked or otherwise
+    designated in writing by the copyright owner as "Not a Contribution."
+    "Contributor" shall mean Licensor and any individual or Legal Entity
+    on behalf of whom a Contribution has been received by Licensor and
+    subsequently incorporated within the Work.
+    2. Grant of Copyright License. Subject to the terms and conditions of
+    this License, each Contributor hereby grants to You a perpetual,
+    worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+    copyright license to reproduce, prepare Derivative Works of,
+    publicly display, publicly perform, sublicense, and distribute the
+    Work and such Derivative Works in Source or Object form.
+    3. Grant of Patent License. Subject to the terms and conditions of
+    this License, each Contributor hereby grants to You a perpetual,
+    worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+    (except as stated in this section) patent license to make, have made,
+    use, offer to sell, sell, import, and otherwise transfer the Work,
+    where such license applies only to those patent claims licensable
+    by such Contributor that are necessarily infringed by their
+    Contribution(s) alone or by combination of their Contribution(s)
+    with the Work to which such Contribution(s) was submitted. If You
+    institute patent litigation against any entity (including a
+    cross-claim or counterclaim in a lawsuit) alleging that the Work
+    or a Contribution incorporated within the Work constitutes direct
+    or contributory patent infringement, then any patent licenses
+    granted to You under this License for that Work shall terminate
+    as of the date such litigation is filed.
+    4. Redistribution. You may reproduce and distribute copies of the
+    Work or Derivative Works thereof in any medium, with or without
+    modifications, and in Source or Object form, provided that You
+    meet the following conditions:
+    (a) You must give any other recipients of the Work or
+    Derivative Works a copy of this License; and
+    (b) You must cause any modified files to carry prominent notices
+    stating that You changed the files; and
+    (c) You must retain, in the Source form of any Derivative Works
+    that You distribute, all copyright, patent, trademark, and
+    attribution notices from the Source form of the Work,
+    excluding those notices that do not pertain to any part of
+    the Derivative Works; and
+    (d) If the Work includes a "NOTICE" text file as part of its
+    distribution, then any Derivative Works that You distribute must
+    include a readable copy of the attribution notices contained
+    within such NOTICE file, excluding those notices that do not
+    pertain to any part of the Derivative Works, in at least one
+    of the following places: within a NOTICE text file distributed
+    as part of the Derivative Works; within the Source form or
+    documentation, if provided along with the Derivative Works; or,
+    within a display generated by the Derivative Works, if and
+    wherever such third-party notices normally appear. The contents
+    of the NOTICE file are for informational purposes only and
+    do not modify the License. You may add Your own attribution
+    notices within Derivative Works that You distribute, alongside
+    or as an addendum to the NOTICE text from the Work, provided
+    that such additional attribution notices cannot be construed
+    as modifying the License.
+    You may add Your own copyright statement to Your modifications and
+    may provide additional or different license terms and conditions
+    for use, reproduction, or distribution of Your modifications, or
+    for any such Derivative Works as a whole, provided Your use,
+    reproduction, and distribution of the Work otherwise complies with
+    the conditions stated in this License.
+    5. Submission of Contributions. Unless You explicitly state otherwise,
+    any Contribution intentionally submitted for inclusion in the Work
+    by You to the Licensor shall be under the terms and conditions of
+    this License, without any additional terms or conditions.
+    Notwithstanding the above, nothing herein shall supersede or modify
+    the terms of any separate license agreement you may have executed
+    with Licensor regarding such Contributions.
+    6. Trademarks. This License does not grant permission to use the trade
+    names, trademarks, service marks, or product names of the Licensor,
+    except as required for reasonable and customary use in describing the
+    origin of the Work and reproducing the content of the NOTICE file.
+    7. Disclaimer of Warranty. Unless required by applicable law or
+    agreed to in writing, Licensor provides the Work (and each
+    Contributor provides its Contributions) on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+    implied, including, without limitation, any warranties or conditions
+    of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+    PARTICULAR PURPOSE. You are solely responsible for determining the
+    appropriateness of using or redistributing the Work and assume any
+    risks associated with Your exercise of permissions under this License.
+    8. Limitation of Liability. In no event and under no legal theory,
+    whether in tort (including negligence), contract, or otherwise,
+    unless required by applicable law (such as deliberate and grossly
+    negligent acts) or agreed to in writing, shall any Contributor be
+    liable to You for damages, including any direct, indirect, special,
+    incidental, or consequential damages of any character arising as a
+    result of this License or out of the use or inability to use the
+    Work (including but not limited to damages for loss of goodwill,
+    work stoppage, computer failure or malfunction, or any and all
+    other commercial damages or losses), even if such Contributor
+    has been advised of the possibility of such damages.
+    9. Accepting Warranty or Additional Liability. While redistributing
+    the Work or Derivative Works thereof, You may choose to offer,
+    and charge a fee for, acceptance of support, warranty, indemnity,
+    or other liability obligations and/or rights consistent with this
+    License. However, in accepting such obligations, You may act only
+    on Your own behalf and on Your sole responsibility, not on behalf
+    of any other Contributor, and only if You agree to indemnify,
+    defend, and hold each Contributor harmless for any liability
+    incurred by, or claims asserted against, such Contributor by reason
+    of your accepting any such warranty or additional liability.
+    END OF TERMS AND CONDITIONS
+    APPENDIX: How to apply the Apache License to your work.
+    To apply the Apache License to your work, attach the following
+    boilerplate notice, with the fields enclosed by brackets "[]"
+    replaced with your own identifying information. (Don't include
+    the brackets!)  The text should be enclosed in the appropriate
+    comment syntax for the file format. We also recommend that a
+    file or class name and description of purpose be included on the
+    same "printed page" as the copyright notice for easier
+    identification within third-party archives.
+    Copyright [yyyy] [name of copyright owner]
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+    http://www.apache.org/licenses/LICENSE-2.0
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+    For more details on the original project and licensing, please visit
+    https://github.com/bedatadriven/jackson-datatype-jts.
+</@license>
\ No newline at end of file
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/config/AlgebricksConfig.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/config/AlgebricksConfig.java
index 98c4223..0b29f7d 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/config/AlgebricksConfig.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/config/AlgebricksConfig.java
@@ -46,4 +46,6 @@
             StorageUtil.getIntSizeInBytes(8, StorageUtil.StorageUnit.KILOBYTE);
     public static final boolean BATCH_LOOKUP_DEFAULT = true;
     public static final boolean COLUMN_FILTER_DEFAULT = true;
+    public static final boolean ORDERED_FIELDS = false;
+    public static final int MAX_VARIABLE_OCCURRENCES_INLINING_DEFAULT = 128;
 }
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
index 11171a1..163fb16 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
@@ -62,6 +62,9 @@
     private static final String MIN_JOIN_FRAMES = "MIN_JOIN_FRAMES";
     private static final String MIN_GROUP_FRAMES = "MIN_GROUP_FRAMES";
     private static final String MIN_WINDOW_FRAMES = "MIN_WINDOW_FRAMES";
+    private static final String MAX_VARIABLE_OCCURRENCES_INLINING = "MAX_VARIABLE_OCCURRENCES_INLINING";
+
+    private static final String ORDER_FIELDS = "ORDER_FIELDS";
 
     private final Properties properties = new Properties();
 
@@ -159,6 +162,10 @@
         setInt(MAX_FRAMES_FOR_TEXTSEARCH, frameLimit);
     }
 
+    public void setOrderFields(boolean orderFields) {
+        setBoolean(ORDER_FIELDS, orderFields);
+    }
+
     public int getHashGroupByTableSize() {
         return getInt(DEFAULT_HASH_GROUP_TABLE_SIZE, 10485767);
     }
@@ -235,6 +242,10 @@
         return getBoolean(SORT_PARALLEL, AlgebricksConfig.SORT_PARALLEL_DEFAULT);
     }
 
+    public boolean isOrderField() {
+        return getBoolean(ORDER_FIELDS, AlgebricksConfig.ORDERED_FIELDS);
+    }
+
     public void setSortParallel(boolean sortParallel) {
         setBoolean(SORT_PARALLEL, sortParallel);
     }
@@ -383,6 +394,14 @@
         return properties.getProperty(property);
     }
 
+    public int getMaxVariableOccurrencesForInlining() {
+        return getInt(MAX_VARIABLE_OCCURRENCES_INLINING, AlgebricksConfig.MAX_VARIABLE_OCCURRENCES_INLINING_DEFAULT);
+    }
+
+    public void setMaxVariableOccurrencesForInlining(int maxVariableOccurrencesForInlining) {
+        setInt(MAX_VARIABLE_OCCURRENCES_INLINING, maxVariableOccurrencesForInlining);
+    }
+
     private void setInt(String property, int value) {
         properties.setProperty(property, Integer.toString(value));
     }
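
Both new knobs follow the class's existing pattern: values live as strings in the backing Properties object, and the getters fall back to the AlgebricksConfig defaults (ORDERED_FIELDS = false, MAX_VARIABLE_OCCURRENCES_INLINING_DEFAULT = 128). A minimal sketch, assuming the default constructor:

    PhysicalOptimizationConfig conf = new PhysicalOptimizationConfig();
    boolean ordered = conf.isOrderField();                    // false by default
    int cap = conf.getMaxVariableOccurrencesForInlining();    // 128 by default
    conf.setOrderFields(true);                                // stored as "true"
    conf.setMaxVariableOccurrencesForInlining(32);            // stored as "32"
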
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java
index 6ad50e6..5970f4f 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java
@@ -19,6 +19,7 @@
 package org.apache.hyracks.algebricks.rewriter.rules;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -84,6 +85,7 @@
     private final List<LogicalVariable> usedVars = new ArrayList<>();
     // map of variables and the counts of how many times they were used
     private final Map<LogicalVariable, MutableInt> usedVariableCounter = new HashMap<>();
+    private final Map<LogicalVariable, Map<LogicalVariable, Integer>> totalLeafVariableCounter = new HashMap<>();
 
     @Override
     public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
@@ -113,6 +115,7 @@
         inlineVisitor.setContext(context);
         subTreesDone.clear();
         usedVariableCounter.clear();
+        totalLeafVariableCounter.clear();
     }
 
     protected boolean performBottomUpAction(ILogicalOperator op) throws AlgebricksException {
@@ -168,6 +171,20 @@
             }
         }
 
+        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+            AssignOperator assignOp = (AssignOperator) op;
+            computeLeafVariablesCount(assignOp);
+            List<LogicalVariable> vars = assignOp.getVariables();
+            for (LogicalVariable variable : vars) {
+                // Don't inline variables that potentially reference a large number of the same leaf variable.
+                Map<LogicalVariable, Integer> varMap = totalLeafVariableCounter.get(variable);
+                if (varMap != null && !varMap.isEmpty() && Collections.max(varMap.values()) > context
+                        .getPhysicalOptimizationConfig().getMaxVariableOccurrencesForInlining()) {
+                    varAssignRhs.remove(variable);
+                }
+            }
+        }
+
         // Descend into subplan
         if (op.getOperatorTag() == LogicalOperatorTag.SUBPLAN || op.getOperatorTag() == LogicalOperatorTag.WINDOW) {
             List<ILogicalPlan> nestedPlans = ((AbstractOperatorWithNestedPlans) op).getNestedPlans();
@@ -259,6 +276,28 @@
         }
     }
 
+    private void computeLeafVariablesCount(AssignOperator assignOp) {
+        List<LogicalVariable> vars = assignOp.getVariables();
+        List<Mutable<ILogicalExpression>> exprs = assignOp.getExpressions();
+        for (int i = 0; i < vars.size(); i++) {
+            LogicalVariable variable = vars.get(i);
+            ILogicalExpression expr = exprs.get(i).getValue();
+            usedVars.clear();
+            expr.getUsedVariables(usedVars);
+            Map<LogicalVariable, Integer> varMap =
+                    totalLeafVariableCounter.computeIfAbsent(variable, k -> new HashMap<>());
+            for (LogicalVariable usedVar : usedVars) {
+                if (totalLeafVariableCounter.containsKey(usedVar)) {
+                    for (Map.Entry<LogicalVariable, Integer> entry : totalLeafVariableCounter.get(usedVar).entrySet()) {
+                        varMap.put(entry.getKey(), entry.getValue() + varMap.getOrDefault(entry.getKey(), 0));
+                    }
+                } else {
+                    varMap.put(usedVar, 1);
+                }
+            }
+        }
+    }
+
     public static class InlineVariablesVisitor extends LogicalExpressionReferenceTransformVisitor
             implements ILogicalExpressionReferenceTransform {
 
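
The new cap guards against inlining chains of assigns whose expansion would duplicate the same leaf variable many times over. computeLeafVariablesCount propagates per-leaf occurrence counts through previously seen assigns, and a produced variable is dropped from varAssignRhs once any of its leaf counts exceeds getMaxVariableOccurrencesForInlining(). A hypothetical trace, assuming assigns are visited producer-first, getUsedVariables reports each occurrence of a variable, and $a comes from a scan (so it is not itself a tracked assign output):

    // assign $b := f($a)         -> leaf counts for $b: {$a: 1}
    // assign $c := g($b, $b)     -> $b's map merged twice: {$a: 2}
    // assign $d := h($c, $c, $c) -> $c's map merged three times: {$a: 6}
    // With a cap of 4, $d is removed from varAssignRhs (6 > 4), so later
    // expressions keep referencing $d instead of inlining three copies
    // of g(f($a), f($a)).
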
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/writer/IWriterPartitioner.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/writer/IWriterPartitioner.java
index cb72aec..7cf4d05 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/writer/IWriterPartitioner.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/writer/IWriterPartitioner.java
@@ -21,7 +21,7 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 
-interface IWriterPartitioner {
+public interface IWriterPartitioner {
     boolean isNewPartition(FrameTupleAccessor tupleAccessor, int index) throws HyracksDataException;
 
 }
\ No newline at end of file
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/writer/SinkExternalWriterRuntimeFactory.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/writer/SinkExternalWriterRuntimeFactory.java
index 321828f..3612511 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/writer/SinkExternalWriterRuntimeFactory.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/writer/SinkExternalWriterRuntimeFactory.java
@@ -23,7 +23,6 @@
 import org.apache.hyracks.algebricks.runtime.writers.IExternalWriter;
 import org.apache.hyracks.algebricks.runtime.writers.IExternalWriterFactory;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -31,64 +30,35 @@
 public final class SinkExternalWriterRuntimeFactory extends AbstractPushRuntimeFactory {
     private static final long serialVersionUID = -2215789207336628581L;
     private final int sourceColumn;
-    private final int[] partitionColumns;
-    private final IBinaryComparatorFactory[] partitionComparatorFactories;
-    private final boolean partitionByKey;
     private final RecordDescriptor inputRecordDescriptor;
     private final IExternalWriterFactory writerFactory;
+    private final WriterPartitionerFactory partitionerFactory;
 
     public SinkExternalWriterRuntimeFactory(int sourceColumn, int[] partitionColumns,
             IBinaryComparatorFactory[] partitionComparatorFactories, RecordDescriptor inputRecordDescriptor,
             IExternalWriterFactory writerFactory) {
-        this(sourceColumn, partitionColumns, partitionComparatorFactories, false, inputRecordDescriptor, writerFactory);
+        this(sourceColumn, inputRecordDescriptor, writerFactory,
+                new WriterPartitionerFactory(partitionColumns, partitionComparatorFactories));
     }
 
     public SinkExternalWriterRuntimeFactory(int sourceColumn, RecordDescriptor inputRecordDescriptor,
             IExternalWriterFactory writerFactory) {
-        this(sourceColumn, null, null, true, inputRecordDescriptor, writerFactory);
+        this(sourceColumn, inputRecordDescriptor, writerFactory, new WriterPartitionerFactory());
     }
 
-    private SinkExternalWriterRuntimeFactory(int sourceColumn, int[] partitionColumns,
-            IBinaryComparatorFactory[] partitionComparatorFactories, boolean partitionByKey,
-            RecordDescriptor inputRecordDescriptor, IExternalWriterFactory writerFactory) {
+    private SinkExternalWriterRuntimeFactory(int sourceColumn, RecordDescriptor inputRecordDescriptor,
+            IExternalWriterFactory writerFactory, WriterPartitionerFactory partitionerFactory) {
         this.sourceColumn = sourceColumn;
-        this.partitionColumns = partitionColumns;
-        this.partitionComparatorFactories = partitionComparatorFactories;
-        this.partitionByKey = partitionByKey;
         this.inputRecordDescriptor = inputRecordDescriptor;
         this.writerFactory = writerFactory;
+        this.partitionerFactory = partitionerFactory;
     }
 
     @Override
     public IPushRuntime[] createPushRuntime(IHyracksTaskContext ctx) throws HyracksDataException {
         IExternalWriter writer = writerFactory.createWriter(ctx);
-        SinkExternalWriterRuntime runtime =
-                new SinkExternalWriterRuntime(sourceColumn, createPartitioner(), inputRecordDescriptor, writer);
+        SinkExternalWriterRuntime runtime = new SinkExternalWriterRuntime(sourceColumn,
+                partitionerFactory.createPartitioner(), inputRecordDescriptor, writer);
         return new IPushRuntime[] { runtime };
     }
-
-    /**
-     * Creates the writer partitioner based on the provided parameters
-     *
-     * @return writer partitioner
-     */
-    private IWriterPartitioner createPartitioner() {
-        // key writer partitioner
-        if (partitionByKey) {
-            return KeyWriterPartitioner.INSTANCE;
-        }
-
-        // writer partitioner
-        if (partitionColumns.length > 0) {
-            IBinaryComparator[] comparators = new IBinaryComparator[partitionComparatorFactories.length];
-            for (int i = 0; i < partitionComparatorFactories.length; i++) {
-                comparators[i] = partitionComparatorFactories[i].createBinaryComparator();
-            }
-
-            return new WriterPartitioner(partitionColumns, comparators);
-        }
-
-        // no-op partitioner
-        return new NoOpWriterPartitioner();
-    }
 }
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/writer/WriterPartitionerFactory.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/writer/WriterPartitionerFactory.java
new file mode 100644
index 0000000..1d5b089
--- /dev/null
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/writer/WriterPartitionerFactory.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.algebricks.runtime.operators.writer;
+
+import java.io.Serializable;
+
+import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
+import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class WriterPartitionerFactory implements Serializable {
+    private static final long serialVersionUID = 8971234908711239L;
+    private final boolean partitionByKey;
+    private final int[] partitionColumns;
+    private final IBinaryComparatorFactory[] partitionComparatorFactories;
+
+    public WriterPartitionerFactory(int[] partitionColumns, IBinaryComparatorFactory[] partitionComparatorFactories) {
+        this(partitionColumns, partitionComparatorFactories, false);
+    }
+
+    public WriterPartitionerFactory() {
+        this(null, null, true);
+    }
+
+    private WriterPartitionerFactory(int[] partitionColumns, IBinaryComparatorFactory[] partitionComparatorFactories,
+            boolean partitionByKey) {
+        this.partitionColumns = partitionColumns;
+        this.partitionComparatorFactories = partitionComparatorFactories;
+        this.partitionByKey = partitionByKey;
+    }
+
+    /**
+     * Creates the writer partitioner based on the provided parameters
+     *
+     * @return writer partitioner
+     */
+    public IWriterPartitioner createPartitioner() {
+        // key writer partitioner
+        if (partitionByKey) {
+            return KeyWriterPartitioner.INSTANCE;
+        }
+
+        // writer partitioner
+        if (partitionColumns.length > 0) {
+            IBinaryComparator[] comparators = new IBinaryComparator[partitionComparatorFactories.length];
+            for (int i = 0; i < partitionComparatorFactories.length; i++) {
+                comparators[i] = partitionComparatorFactories[i].createBinaryComparator();
+            }
+
+            return new WriterPartitioner(partitionColumns, comparators);
+        }
+
+        // no-op partitioner
+        return new NoOpWriterPartitioner();
+    }
+}
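
This factory is a straight extraction of the selection logic that used to sit in SinkExternalWriterRuntimeFactory, made Serializable so it can travel inside the job specification. The three outcomes, sketched with placeholder comparator factories:

    // Key-based: a new partition starts whenever the partition key changes.
    IWriterPartitioner byKey = new WriterPartitionerFactory().createPartitioner();
    // Column-based: compares the given columns with the supplied comparators.
    IWriterPartitioner byColumns = new WriterPartitionerFactory(
            new int[] { 0, 2 }, comparatorFactories).createPartitioner();
    // Empty column list: degenerates to the no-op partitioner.
    IWriterPartitioner noOp = new WriterPartitionerFactory(
            new int[0], new IBinaryComparatorFactory[0]).createPartitioner();
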
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
index 59a4da4..7cb107d 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
@@ -158,6 +158,10 @@
     UNSUPPORTED_WRITE_SPEC(128),
     JOB_REJECTED(129),
     FRAME_BIGGER_THAN_SORT_MEMORY(130),
+    RESULT_DOES_NOT_FOLLOW_SCHEMA(131),
+    EXTRA_FIELD_IN_RESULT_NOT_FOUND_IN_SCHEMA(132),
+    EMPTY_TYPE_INFERRED(133),
+    SCHEMA_LIMIT_EXCEEDED(134),
 
     // Compilation error codes.
     RULECOLLECTION_NOT_INSTANCE_OF_LIST(10000),
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IIOManager.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IIOManager.java
index a6520c6..75cd23d 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IIOManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IIOManager.java
@@ -24,6 +24,7 @@
 import java.nio.channels.WritableByteChannel;
 import java.util.List;
 import java.util.Set;
+import java.util.function.Predicate;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -296,4 +297,12 @@
      * @param bulkOperation the operation to perform
      */
     void performBulkOperation(IIOBulkOperation bulkOperation) throws HyracksDataException;
+
+    /**
+     * Lists the storage files and applies the provided predicate to compute the total size of the matching resources
+     *
+     * @param relativePathFilter predicate applied to each file's relative path
+     * @return total size of the files accepted by the predicate
+     */
+    long getSize(Predicate<String> relativePathFilter);
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java
index 5045c86..4022961 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java
@@ -332,6 +332,10 @@
 
     public static String toString(byte[] bytes, int start) {
         StringBuilder builder = new StringBuilder();
+        return toString(bytes, start, builder);
+    }
+
+    public static String toString(byte[] bytes, int start, StringBuilder builder) {
         return toString(builder, bytes, start).toString();
     }
 
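
The new overload lets hot paths reuse one StringBuilder across many decodes instead of allocating per call. Since the underlying toString(builder, bytes, start) appears to append rather than reset (the old overload hands it a fresh builder), callers presumably need to clear the buffer between uses:

    StringBuilder sb = new StringBuilder();
    for (byte[] rec : utf8Records) {          // utf8Records is illustrative
        sb.setLength(0);                      // reuse the buffer, no per-call garbage
        String s = UTF8StringUtil.toString(rec, 0, sb);
        process(s);
    }
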
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties b/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
index e94c12e..e1fbe30 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
@@ -148,6 +148,10 @@
 128 = Unsupported copy to specification: PARTITION BY %1$s, ORDER BY %2$s
 129 = Job %1$s failed to run. Cluster is not accepting jobs.
 130 = Frame data=%1$s (requiring %2$s) is bigger than the sort budget. Used=%3$s, max=%4$s. Please increase the sort memory budget.
+131 = Result does not follow the schema, %1$s type expected but found %2$s type at '%3$s'
+132 = Extra field in the result, field '%1$s' does not exist at '%2$s' in the schema
+133 = Schema could not be inferred, empty types found in the result
+134 = Schema limit exceeded, maximum number of heterogeneous schemas allowed: '%1$s'
 
 10000 = The given rule collection %1$s is not an instance of the List class.
 10001 = Cannot compose partition constraint %1$s with %2$s
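
The four new templates use the same positional java.util.Formatter arguments as the rest of the file; for instance, code 131 would render roughly as follows (the argument values are illustrative):

    String msg = String.format(
            "Result does not follow the schema, %1$s type expected but found %2$s type at '%3$s'",
            "bigint", "string", "person.age");
    // -> Result does not follow the schema, bigint type expected but found string type at 'person.age'
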
diff --git a/hyracks-fullstack/hyracks/hyracks-cloud/src/main/java/org/apache/hyracks/cloud/cache/unit/AbstractIndexUnit.java b/hyracks-fullstack/hyracks/hyracks-cloud/src/main/java/org/apache/hyracks/cloud/cache/unit/AbstractIndexUnit.java
index 1532340..54ea23c 100644
--- a/hyracks-fullstack/hyracks/hyracks-cloud/src/main/java/org/apache/hyracks/cloud/cache/unit/AbstractIndexUnit.java
+++ b/hyracks-fullstack/hyracks/hyracks-cloud/src/main/java/org/apache/hyracks/cloud/cache/unit/AbstractIndexUnit.java
@@ -36,7 +36,7 @@
 
     public final void open() {
         lastAccessTime.set(System.nanoTime());
-        openCounter.get();
+        openCounter.incrementAndGet();
     }
 
     public final void close() {
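
The one-line fix matters because AtomicInteger.get() only reads the counter: open() never recorded the reference, so an open/close pair could drive the count negative and an in-use index could look evictable. A sketch of the intended invariant, assuming close() decrements the same counter:

    unit.open();   // openCounter: 0 -> 1 (was a no-op read before the fix)
    unit.open();   // 1 -> 2
    unit.close();  // 2 -> 1: still referenced, so the unit must not be reclaimed
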
diff --git a/hyracks-fullstack/hyracks/hyracks-cloud/src/main/java/org/apache/hyracks/cloud/filesystem/PhysicalDrive.java b/hyracks-fullstack/hyracks/hyracks-cloud/src/main/java/org/apache/hyracks/cloud/filesystem/PhysicalDrive.java
index 7010e88..28069a9 100644
--- a/hyracks-fullstack/hyracks/hyracks-cloud/src/main/java/org/apache/hyracks/cloud/filesystem/PhysicalDrive.java
+++ b/hyracks-fullstack/hyracks/hyracks-cloud/src/main/java/org/apache/hyracks/cloud/filesystem/PhysicalDrive.java
@@ -53,17 +53,18 @@
 
     @Override
     public boolean computeAndCheckIsPressured() {
-        long usedSpace = getUsedSpace();
+        long usedSpace = getUsedSpace(drivePaths);
         long pressureCapacity = diskSpace.getPressureCapacity();
         boolean isPressured = usedSpace > pressureCapacity;
         this.usedSpace.set(usedSpace);
 
-        if (isPressured) {
-            LOGGER.info("Used space: {}, pressureCapacity: {} (isPressured: {})",
+        if (usedSpace >= diskSpace.getAllocatedCapacity()) {
+            LOGGER.warn(
+                    "Allocated disk space has been exceeded. Used space: {}, pressureCapacity: {} (isPressured: {})",
                     StorageUtil.toHumanReadableSize(usedSpace), StorageUtil.toHumanReadableSize(pressureCapacity),
                     true);
-        } else if (usedSpace >= diskSpace.getAllocatedCapacity()) {
-            LOGGER.warn("Allocated disk space has been exceeded. Used space: {}, pressureCapacity: {}",
+        } else if (isPressured) {
+            LOGGER.info("Used space: {}, pressureCapacity: {} (isPressured: {})",
                     StorageUtil.toHumanReadableSize(usedSpace), StorageUtil.toHumanReadableSize(pressureCapacity),
                     true);
         } else {
@@ -85,10 +86,9 @@
         return usedSpace.get() < diskSpace.getPressureCapacity();
     }
 
-    private long getUsedSpace() {
+    public static long getUsedSpace(List<FileStore> drivePaths) {
         long totalUsedSpace = 0;
-        for (int i = 0; i < drivePaths.size(); i++) {
-            FileStore device = drivePaths.get(i);
+        for (FileStore device : drivePaths) {
             try {
                 totalUsedSpace += getTotalSpace(device) - getUsableSpace(device);
             } catch (HyracksDataException e) {
@@ -121,7 +121,7 @@
         return new DiskSpace(allocatedCapacity, pressureCapacity);
     }
 
-    private static List<FileStore> getDrivePaths(List<IODeviceHandle> deviceHandles) throws HyracksDataException {
+    public static List<FileStore> getDrivePaths(List<IODeviceHandle> deviceHandles) throws HyracksDataException {
         Set<String> distinctDrives = new HashSet<>();
         List<FileStore> fileStores = new ArrayList<>();
         for (IODeviceHandle handle : deviceHandles) {
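
Reordering the branches fixes a masking problem: assuming the pressure threshold sits at or below the allocated capacity, the old isPressured branch fired first and the over-allocation warning was unreachable. The structure after the change, sketched:

    if (usedSpace >= allocatedCapacity) {
        // most severe condition first: the WARN is always surfaced
    } else if (usedSpace > pressureCapacity) {
        // pressured but still within the allocation: INFO
    } else {
        // healthy
    }
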
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/IOManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/IOManager.java
index b35111e..960f23b 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/IOManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/IOManager.java
@@ -40,6 +40,7 @@
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
+import java.util.function.Predicate;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hyracks.api.exceptions.ErrorCode;
@@ -67,6 +68,7 @@
      */
     private static final Logger LOGGER = LogManager.getLogger();
     private static final String WORKSPACE_FILE_SUFFIX = ".waf";
+    private static final String STORAGE_ROOT_DIR_NAME = "storage";
     private static final FilenameFilter WORKSPACE_FILES_FILTER = (dir, name) -> name.endsWith(WORKSPACE_FILE_SUFFIX);
     /*
      * Finals
@@ -76,7 +78,7 @@
     private final ExecutorService executor;
     private final BlockingQueue<IoRequest> submittedRequests;
     private final BlockingQueue<IoRequest> freeRequests;
-    private final List<IODeviceHandle> ioDevices;
+    protected final List<IODeviceHandle> ioDevices;
     private final List<IODeviceHandle> workspaces;
     private final IFileDeviceResolver deviceComputer;
     /*
@@ -601,6 +603,36 @@
         ((AbstractBulkOperation) bulkOperation).performOperation();
     }
 
+    @Override
+    public long getSize(Predicate<String> relativePathFilter) {
+        long totalSize = 0;
+
+        // get cached files (read from disk)
+        for (IODeviceHandle deviceHandle : getIODevices()) {
+            FileReference storageRoot = deviceHandle.createFileRef(STORAGE_ROOT_DIR_NAME);
+
+            Set<FileReference> deviceFiles;
+            try {
+                deviceFiles = list(storageRoot, IoUtil.NO_OP_FILTER);
+            } catch (Throwable th) {
+                LOGGER.info("Failed to get local storage files for root {}", storageRoot.getRelativePath(), th);
+                continue;
+            }
+
+            for (FileReference fileReference : deviceFiles) {
+                try {
+                    if (relativePathFilter.test(fileReference.getRelativePath())) {
+                        totalSize += fileReference.getFile().length();
+                    }
+                } catch (Throwable th) {
+                    LOGGER.info("Encountered issue for local storage file {}", fileReference.getRelativePath(), th);
+                }
+            }
+        }
+
+        return totalSize;
+    }
+
     public void setSpaceMaker(IDiskSpaceMaker spaceMaker) {
         this.spaceMaker = spaceMaker;
     }
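
In this default implementation, getSize walks the storage root of every IO device, applies the filter to each file's device-relative path, and sums the matching file lengths; listing or stat failures are logged and skipped so a single bad device or vanished file cannot fail the whole scan. A hedged usage example (the path prefix is illustrative):

    // Approximate on-disk footprint of one dataset across all devices.
    long bytes = ioManager.getSize(rel -> rel.startsWith("storage/partition_0/MyDataset"));
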
diff --git a/hyracks-fullstack/pom.xml b/hyracks-fullstack/pom.xml
index 51bdd2f..20c3e21 100644
--- a/hyracks-fullstack/pom.xml
+++ b/hyracks-fullstack/pom.xml
@@ -75,7 +75,7 @@
     <snappy.version>1.1.10.5</snappy.version>
     <jackson.version>2.14.3</jackson.version>
     <jackson-databind.version>${jackson.version}</jackson-databind.version>
-    <netty.version>4.1.101.Final</netty.version>
+    <netty.version>4.1.112.Final</netty.version>
     <asm.version>9.3</asm.version>
 
     <implementation.title>Apache Hyracks and Algebricks - ${project.name}</implementation.title>