Merge branch 'gerrit/goldfish' into 'master'
Change-Id: Ie8dd9fb354c3cf3252eeb328e12539189c203527
diff --git a/asterixdb/LICENSE b/asterixdb/LICENSE
index 4c63d01..72413f8 100644
--- a/asterixdb/LICENSE
+++ b/asterixdb/LICENSE
@@ -517,6 +517,202 @@
limitations under the License.
---
+ Portions of the AsterixDB OM
+ located at:
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/JtsModule.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiLineStringParser.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PointParser.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPointParser.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryParser.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPolygonParser.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/LineStringParser.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/BaseParser.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryCollectionParser.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GenericGeometryParser.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PolygonParser.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoFunctionUtils.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoJsonConstants.java,
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometryDeserializer.java,
+ and
+ asterix-app/src/main/resources/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometrySerializer.java
+
+ are available under the following license:
+---
+ Classes are modified to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ Copyright BeDataDriven
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+ 1. Definitions.
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+ END OF TERMS AND CONDITIONS
+ APPENDIX: How to apply the Apache License to your work.
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+ Copyright [yyyy] [name of copyright owner]
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ For more details on the original project and licensing, please visit
+ https://github.com/bedatadriven/jackson-datatype-jts.
+---
Portions of the AsterixDB API examples
located at:
asterix-examples/src/main/resources/admaql101-demo/bottle.py,
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/SqlppCompilationProvider.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/SqlppCompilationProvider.java
index 4469dbd..78b1e2b 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/SqlppCompilationProvider.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/SqlppCompilationProvider.java
@@ -95,11 +95,11 @@
CompilerProperties.COMPILER_FORCE_JOIN_ORDER_KEY, CompilerProperties.COMPILER_QUERY_PLAN_SHAPE_KEY,
CompilerProperties.COMPILER_MIN_MEMORY_ALLOCATION_KEY, CompilerProperties.COMPILER_COLUMN_FILTER_KEY,
CompilerProperties.COMPILER_BATCH_LOOKUP_KEY, FunctionUtil.IMPORT_PRIVATE_FUNCTIONS,
- FuzzyUtils.SIM_FUNCTION_PROP_NAME, FuzzyUtils.SIM_THRESHOLD_PROP_NAME,
- StartFeedStatement.WAIT_FOR_COMPLETION, FeedActivityDetails.FEED_POLICY_NAME,
- FeedActivityDetails.COLLECT_LOCATIONS, SqlppQueryRewriter.INLINE_WITH_OPTION,
- SqlppExpressionToPlanTranslator.REWRITE_IN_AS_OR_OPTION, "hash_merge", "output-record-type",
- DisjunctivePredicateToJoinRule.REWRITE_OR_AS_JOIN_OPTION,
+ CompilerProperties.COMPILER_MAX_VARIABLE_OCCURRENCES_INLINING_KEY, FuzzyUtils.SIM_FUNCTION_PROP_NAME,
+ FuzzyUtils.SIM_THRESHOLD_PROP_NAME, StartFeedStatement.WAIT_FOR_COMPLETION,
+ FeedActivityDetails.FEED_POLICY_NAME, FeedActivityDetails.COLLECT_LOCATIONS,
+ SqlppQueryRewriter.INLINE_WITH_OPTION, SqlppExpressionToPlanTranslator.REWRITE_IN_AS_OR_OPTION,
+ "hash_merge", "output-record-type", DisjunctivePredicateToJoinRule.REWRITE_OR_AS_JOIN_OPTION,
SetAsterixPhysicalOperatorsRule.REWRITE_ATTEMPT_BATCH_ASSIGN,
EquivalenceClassUtils.REWRITE_INTERNAL_QUERYUID_PK, SqlppQueryRewriter.SQL_COMPAT_OPTION,
JoinEnum.CBO_FULL_ENUM_LEVEL_KEY, JoinEnum.CBO_CP_ENUM_KEY));
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinCondition.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinCondition.java
index d56d38a..b5c290f 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinCondition.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinCondition.java
@@ -29,6 +29,7 @@
protected boolean outerJoin;
private boolean derived = false;
protected boolean partOfComposite = false;
+ protected boolean deleted = false;
protected int numberOfVars = 0; // how many variables
protected int componentNumber = 0; // for identifying if join graph is connected
protected int datasetBits;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinNode.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinNode.java
index d6ea457..26707e1 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinNode.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/JoinNode.java
@@ -300,10 +300,10 @@
parent.getInputs().get(0).setValue(deepCopyofScan);
finalDatasetCard = origDatasetCard = idxDetails.getSourceCardinality();
sampleCard = Math.min(idxDetails.getSampleCardinalityTarget(), origDatasetCard);
- boolean unnest = joinEnum.findUnnestOp(selOp);
+ boolean unnest = joinEnum.findUnnestOp(leafInput);
if (unnest) {
ILogicalExpression saveExpr = selOp.getCondition().getValue();
- double unnestSampleCard = joinEnum.stats.computeUnnestedOriginalCardinality(selOp);
+ double unnestSampleCard = joinEnum.stats.computeUnnestedOriginalCardinality(leafInput);
selOp.getCondition().setValue(saveExpr); // restore the expression
unnestFactor = unnestSampleCard / sampleCard;
sampleCard = unnestSampleCard;
@@ -446,7 +446,10 @@
if (this.applicableJoinConditions.size() >= 3) {
redundantSel = removeRedundantPred(this.applicableJoinConditions);
}
-
+ // mark all conditions back to non deleted state
+ for (JoinCondition jc : joinConditions) {
+ jc.deleted = false;
+ }
// By dividing by redundantSel, we are undoing the earlier multiplication of all the selectivities.
return joinCard / redundantSel;
}
@@ -456,7 +459,7 @@
if (jc1.comparisonType == JoinCondition.comparisonOp.OP_EQ
&& jc2.comparisonType == JoinCondition.comparisonOp.OP_EQ
&& jc3.comparisonType == JoinCondition.comparisonOp.OP_EQ) {
- sel = findRedundantSel(jc1.selectivity, jc2.selectivity, jc3.selectivity);
+ sel = findRedundantSel(jc1, jc2, jc3);
} else {
// at least one of the predicates in not an equality predicate
//this can get messy here, as 1, or 2 or all 3 can be non equality
@@ -472,6 +475,35 @@
return sel;
}
+ private static double findRedundantSel(JoinCondition jc1, JoinCondition jc2, JoinCondition jc3) {
+ // find middle selectivity
+ if (jc2.selectivity <= jc1.selectivity && jc1.selectivity <= jc3.selectivity) {
+ jc1.deleted = true;
+ return jc1.selectivity;
+ }
+ if (jc3.selectivity <= jc1.selectivity && jc1.selectivity <= jc2.selectivity) {
+ jc1.deleted = true;
+ return jc1.selectivity;
+ }
+ if (jc1.selectivity <= jc2.selectivity && jc2.selectivity <= jc3.selectivity) {
+ jc2.deleted = true;
+ return jc2.selectivity;
+ }
+ if (jc3.selectivity <= jc2.selectivity && jc2.selectivity <= jc1.selectivity) {
+ jc2.deleted = true;
+ return jc2.selectivity;
+ }
+ if (jc1.selectivity <= jc3.selectivity && jc3.selectivity <= jc2.selectivity) {
+ jc3.deleted = true;
+ return jc3.selectivity;
+ }
+ if (jc2.selectivity <= jc3.selectivity && jc3.selectivity <= jc1.selectivity) {
+ jc3.deleted = true;
+ return jc3.selectivity;
+ }
+ return 1.0; // keep compiler happy
+ }
+
// if a redundant edge is found, we need to eliminate one of the edges.
// If two triangles share an edge, removing the common edge will suffice
// Each edge has two vertices. So we can only handle predicate with exactly two tables such as R.a = S.a
@@ -485,21 +517,21 @@
int[] verticesCopy = new int[6];
for (int i = 0; i <= applicablePredicatesInCurrentJn.size() - 3; i++) {
jc1 = joinConditions.get(applicablePredicatesInCurrentJn.get(i));
- if (jc1.partOfComposite) {
+ if (jc1.partOfComposite || jc1.deleted) {
continue; // must ignore these or the same triangles will be found more than once.
}
vertices[0] = jc1.leftSideBits;
vertices[1] = jc1.rightSideBits;
for (int j = i + 1; j <= applicablePredicatesInCurrentJn.size() - 2; j++) {
jc2 = joinConditions.get(applicablePredicatesInCurrentJn.get(j));
- if (jc2.partOfComposite) {
+ if (jc2.partOfComposite || jc2.deleted) {
continue;
}
vertices[2] = jc2.leftSideBits;
vertices[3] = jc2.rightSideBits;
for (int k = j + 1; k <= applicablePredicatesInCurrentJn.size() - 1; k++) {
jc3 = joinConditions.get(applicablePredicatesInCurrentJn.get(k));
- if (jc3.partOfComposite) {
+ if (jc3.partOfComposite || jc3.deleted) {
continue;
}
vertices[4] = jc3.leftSideBits;
@@ -510,7 +542,9 @@
if (verticesCopy[0] == verticesCopy[1] && verticesCopy[2] == verticesCopy[3]
&& verticesCopy[4] == verticesCopy[5]) {
// redundant edge found
- redundantSel *= adjustSelectivities(jc1, jc2, jc3);
+ if (!(jc1.deleted || jc2.deleted || jc3.deleted)) {
+ redundantSel *= adjustSelectivities(jc1, jc2, jc3);
+ }
}
}
}
@@ -518,16 +552,6 @@
return redundantSel;
}
- private static double findRedundantSel(double sel1, double sel2, double sel3) {
- double[] sels = new double[3];
- sels[0] = sel1;
- sels[1] = sel2;
- sels[2] = sel3;
-
- Arrays.sort(sels); // we are sorting to make this deterministic
- return sels[1]; // the middle one is closest to one of the extremes
- }
-
protected int addSingleDatasetPlans() {
List<PlanNode> allPlans = joinEnum.allPlans;
ICost opCost;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/Stats.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/Stats.java
index aa2f40f..c6f274f 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/Stats.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/cbo/Stats.java
@@ -118,9 +118,7 @@
// Since there is a left and right dataset here, expecting only two variables.
return 1.0;
}
- if (!(joinExpr.getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.EQ))) {
- return 0.5; // we will assume half; rest of the code assumes EQ joins
- }
+
int idx1, idx2;
if (joinEnum.varLeafInputIds.containsKey(exprUsedVars.get(0))) {
idx1 = joinEnum.varLeafInputIds.get(exprUsedVars.get(0));
@@ -168,6 +166,9 @@
} else {
ILogicalOperator leafInput;
LogicalVariable var;
+ if (!(joinExpr.getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.EQ))) {
+ return 0.5; // we will assume half; rest of the code assumes EQ joins
+ }
// choose the smaller side sample; better results this way for sure!
if (card1 < card2) {
leafInput = joinEnum.leafInputs.get(idx1 - 1);
@@ -537,11 +538,11 @@
return record.numberOfFields();
}
- public double computeUnnestedOriginalCardinality(SelectOperator selOp) throws AlgebricksException {
+ public double computeUnnestedOriginalCardinality(ILogicalOperator op) throws AlgebricksException {
// Replace ALL SELECTS with TRUE, restore them after running the sampling query.
- List<ILogicalExpression> selExprs = storeSelectConditionsAndMakeThemTrue(selOp, null);
- List<List<IAObject>> result = runSamplingQuery(optCtx, selOp);
- restoreAllSelectConditions(selOp, selExprs, null);
+ List<ILogicalExpression> selExprs = storeSelectConditionsAndMakeThemTrue(op, null);
+ List<List<IAObject>> result = runSamplingQuery(optCtx, op);
+ restoreAllSelectConditions(op, selExprs, null);
return findPredicateCardinality(result, false);
}
diff --git a/asterixdb/asterix-app/pom.xml b/asterixdb/asterix-app/pom.xml
index 63bb7da..5b2f8a4 100644
--- a/asterixdb/asterix-app/pom.xml
+++ b/asterixdb/asterix-app/pom.xml
@@ -513,7 +513,7 @@
<profile>
<id>asterix-gerrit-asterix-app</id>
<properties>
- <test.excludes>**/CloudStorageTest.java,**/CloudStorageGCSTest.java,**/SqlppExecutionWithCancellationTest.java,**/DmlTest.java,**/RepeatedTest.java,**/SqlppExecutionTest.java,**/SqlppExecutionColumnTest.java,**/*StaticPartitioning*Test.java,**/*Ssl*Test.java,**/Podman*.java,**/*AnalyzedExecutionTest.java,**/SqlppProfiledExecutionTest.java,**/CloudPythonTest.java</test.excludes>
+ <test.excludes>**/CloudStorageTest.java,**/CloudStorageGCSTest.java,**/SqlppExecutionWithCancellationTest.java,**/DmlTest.java,**/RepeatedTest.java,**/SqlppExecutionTest.java,**/SqlppExecutionColumnTest.java,**/*StaticPartitioning*Test.java,**/*Ssl*Test.java,**/Podman*.java,**/*AnalyzedExecutionTest.java,**/SqlppProfiledExecutionTest.java,**/CloudPythonTest.java,**/CloudStorageAzTest.java</test.excludes>
<itest.excludes>**/*.java</itest.excludes>
</properties>
<build>
@@ -590,6 +590,43 @@
<failIfNoTests>false</failIfNoTests>
</properties>
</profile>
+ <profile>
+ <id>azurite-tests</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>com.github.eirslett</groupId>
+ <artifactId>frontend-maven-plugin</artifactId>
+ <version>1.13.4</version>
+ <configuration>
+ <nodeVersion>v14.15.4</nodeVersion>
+ <npmVersion>6.14.11</npmVersion>
+ <workingDirectory>target/npm</workingDirectory>
+ <installDirectory>target/npm</installDirectory>
+ </configuration>
+ <executions>
+ <execution>
+ <id>install node and yarn</id>
+ <goals>
+ <goal>install-node-and-npm</goal>
+ </goals>
+ <phase>${azurite.npm.install.stage}</phase>
+ </execution>
+ <execution>
+ <id>azurite blob</id>
+ <phase>${azurite.install.stage}</phase>
+ <goals>
+ <goal>npm</goal>
+ </goals>
+ <configuration>
+ <arguments>install azurite</arguments>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
</profiles>
<dependencies>
<dependency>
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 0c736a2..f30c800 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -109,6 +109,7 @@
import org.apache.asterix.external.util.ExternalDataConstants;
import org.apache.asterix.external.util.ExternalDataUtils;
import org.apache.asterix.external.util.WriterValidationUtil;
+import org.apache.asterix.external.writer.printer.parquet.SchemaConverterVisitor;
import org.apache.asterix.lang.common.base.IQueryRewriter;
import org.apache.asterix.lang.common.base.IReturningStatement;
import org.apache.asterix.lang.common.base.IRewriterFactory;
@@ -383,10 +384,12 @@
activeNamespace = handleUseDataverseStatement(metadataProvider, stmt);
break;
case CREATE_DATABASE:
- handleCreateDatabaseStatement(metadataProvider, stmt, requestParameters);
+ handleCreateDatabaseStatement(metadataProvider, stmt, requestParameters,
+ MetadataConstants.DEFAULT_OWNER);
break;
case CREATE_DATAVERSE:
- handleCreateDataverseStatement(metadataProvider, stmt, requestParameters);
+ handleCreateDataverseStatement(metadataProvider, stmt, requestParameters,
+ MetadataConstants.DEFAULT_OWNER);
break;
case DATASET_DECL:
handleCreateDatasetStatement(metadataProvider, stmt, hcc, requestParameters);
@@ -656,7 +659,7 @@
}
protected void handleCreateDatabaseStatement(MetadataProvider metadataProvider, Statement stmt,
- IRequestParameters requestParameters) throws Exception {
+ IRequestParameters requestParameters, String ownerName) throws Exception {
CreateDatabaseStatement stmtCreateDatabase = (CreateDatabaseStatement) stmt;
String database = stmtCreateDatabase.getDatabaseName().getValue();
metadataProvider.validateDatabaseName(database, stmt.getSourceLocation());
@@ -665,14 +668,14 @@
}
lockUtil.createDatabaseBegin(lockManager, metadataProvider.getLocks(), database);
try {
- doCreateDatabaseStatement(metadataProvider, stmtCreateDatabase, requestParameters);
+ doCreateDatabaseStatement(metadataProvider, stmtCreateDatabase, requestParameters, ownerName);
} finally {
metadataProvider.getLocks().unlock();
}
}
protected boolean doCreateDatabaseStatement(MetadataProvider mdProvider, CreateDatabaseStatement stmtCreateDatabase,
- IRequestParameters requestParameters) throws Exception {
+ IRequestParameters requestParameters, String ownerName) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
mdProvider.setMetadataTxnContext(mdTxnCtx);
try {
@@ -687,8 +690,9 @@
databaseName);
}
}
+
MetadataManager.INSTANCE.addDatabase(mdTxnCtx,
- new Database(databaseName, false, MetadataUtil.PENDING_NO_OP));
+ new Database(databaseName, false, ownerName, MetadataUtil.PENDING_NO_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return true;
} catch (Exception e) {
@@ -698,7 +702,7 @@
}
protected void handleCreateDataverseStatement(MetadataProvider metadataProvider, Statement stmt,
- IRequestParameters requestParameters) throws Exception {
+ IRequestParameters requestParameters, String ownerName) throws Exception {
CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
DataverseName dvName = stmtCreateDataverse.getDataverseName();
String dbName = stmtCreateDataverse.getDatabaseName();
@@ -709,7 +713,7 @@
}
lockUtil.createDataverseBegin(lockManager, metadataProvider.getLocks(), dbName, dvName);
try {
- doCreateDataverseStatement(metadataProvider, stmtCreateDataverse, requestParameters);
+ doCreateDataverseStatement(metadataProvider, stmtCreateDataverse, requestParameters, ownerName);
} finally {
metadataProvider.getLocks().unlock();
}
@@ -717,7 +721,8 @@
@SuppressWarnings("squid:S00112")
protected boolean doCreateDataverseStatement(MetadataProvider metadataProvider,
- CreateDataverseStatement stmtCreateDataverse, IRequestParameters requestParameters) throws Exception {
+ CreateDataverseStatement stmtCreateDataverse, IRequestParameters requestParameters, String ownerName)
+ throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
@@ -740,8 +745,8 @@
dvName);
}
}
- MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(),
- new Dataverse(dbName, dvName, stmtCreateDataverse.getFormat(), MetadataUtil.PENDING_NO_OP));
+ MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(), new Dataverse(dbName,
+ dvName, stmtCreateDataverse.getFormat(), MetadataUtil.PENDING_NO_OP, ownerName));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return true;
} catch (Exception e) {
@@ -2068,8 +2073,8 @@
// second, inserting the database record with the PendingDropOp value into the 'Database' collection
// Note: the delete operation fails if the database cannot be deleted due to metadata dependencies
MetadataManager.INSTANCE.dropDatabase(mdTxnCtx, databaseName);
- MetadataManager.INSTANCE.addDatabase(mdTxnCtx,
- new Database(databaseName, database.isSystemDatabase(), MetadataUtil.PENDING_DROP_OP));
+ MetadataManager.INSTANCE.addDatabase(mdTxnCtx, new Database(databaseName, database.isSystemDatabase(),
+ MetadataConstants.DEFAULT_OWNER, MetadataUtil.PENDING_DROP_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
@@ -2254,8 +2259,8 @@
// second, inserting the dataverse record with the PendingDropOp value into the DATAVERSE_DATASET
// Note: the delete operation fails if the dataverse cannot be deleted due to metadata dependencies
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, databaseName, dataverseName);
- MetadataManager.INSTANCE.addDataverse(mdTxnCtx,
- new Dataverse(databaseName, dataverseName, dv.getDataFormat(), MetadataUtil.PENDING_DROP_OP));
+ MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(databaseName, dataverseName,
+ dv.getDataFormat(), MetadataUtil.PENDING_DROP_OP, MetadataConstants.DEFAULT_OWNER));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
@@ -4089,6 +4094,21 @@
ExternalDataConstants.WRITER_SUPPORTED_ADAPTERS, copyTo.getSourceLocation(), mdTxnCtx,
metadataProvider));
+ if (edd.getProperties().get(ExternalDataConstants.KEY_FORMAT)
+ .equalsIgnoreCase(ExternalDataConstants.FORMAT_PARQUET)) {
+ if (copyTo.getType() == null) {
+ throw new CompilationException(ErrorCode.COMPILATION_ERROR,
+ "TYPE() Expression is required for parquet format");
+ }
+
+ DataverseName dataverseName =
+ DataverseName.createFromCanonicalForm(ExternalDataConstants.DUMMY_DATAVERSE_NAME);
+ IAType iaType = translateType(ExternalDataConstants.DUMMY_DATABASE_NAME, dataverseName,
+ ExternalDataConstants.DUMMY_TYPE_NAME, copyTo.getType(), mdTxnCtx);
+ edd.getProperties().put(ExternalDataConstants.PARQUET_SCHEMA_KEY,
+ SchemaConverterVisitor.convertToParquetSchemaString((ARecordType) iaType));
+ }
+
Map<VarIdentifier, IAObject> externalVars = createExternalVariables(copyTo, stmtParams);
// Query Rewriting (happens under the same ongoing metadata transaction)
LangRewritingContext langRewritingContext = createLangRewritingContext(metadataProvider,
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
index 0b7e7d0..6928b64 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
@@ -91,6 +91,7 @@
import org.apache.asterix.common.metadata.NamespacePathResolver;
import org.apache.asterix.common.metadata.NamespaceResolver;
import org.apache.asterix.common.replication.INcLifecycleCoordinator;
+import org.apache.asterix.common.utils.IdentifierUtil;
import org.apache.asterix.common.utils.Servlets;
import org.apache.asterix.external.adapter.factory.AdapterFactoryService;
import org.apache.asterix.file.StorageComponentProvider;
@@ -165,7 +166,7 @@
ccServiceCtx.setMessageBroker(new CCMessageBroker(controllerService));
ccServiceCtx.setPersistedResourceRegistry(new PersistedResourceRegistry());
configureLoggingLevel(ccServiceCtx.getAppConfig().getLoggingLevel(ExternalProperties.Option.LOG_LEVEL));
- LOGGER.info("Starting Asterix cluster controller");
+ LOGGER.info("Starting {} cluster controller", IdentifierUtil.productName());
String strIP = ccServiceCtx.getCCContext().getClusterControllerInfo().getClientNetAddress();
int port = ccServiceCtx.getCCContext().getClusterControllerInfo().getClientNetPort();
hcc = new HyracksConnection(strIP, port,
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
index 0ff8796..6f8126b 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
@@ -79,6 +79,7 @@
import org.apache.asterix.common.transactions.IRecoveryManager;
import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
import org.apache.asterix.common.transactions.IRecoveryManagerFactory;
+import org.apache.asterix.common.utils.IdentifierUtil;
import org.apache.asterix.common.utils.PrintUtil;
import org.apache.asterix.common.utils.Servlets;
import org.apache.asterix.common.utils.StorageConstants;
@@ -148,9 +149,7 @@
throw new IllegalArgumentException("Unrecognized argument(s): " + Arrays.toString(args));
}
nodeId = this.ncServiceCtx.getNodeId();
- if (LOGGER.isInfoEnabled()) {
- LOGGER.info("Starting Asterix node controller: " + nodeId);
- }
+ LOGGER.info("Starting {} node controller: {}", IdentifierUtil.productName(), nodeId);
final NodeControllerService controllerService = (NodeControllerService) ncServiceCtx.getControllerService();
if (System.getProperty("java.rmi.server.hostname") == null) {
@@ -280,9 +279,7 @@
if (!stopInitiated) {
runtimeContext.setShuttingdown(true);
stopInitiated = true;
- if (LOGGER.isInfoEnabled()) {
- LOGGER.info("Stopping Asterix node controller: " + nodeId);
- }
+ LOGGER.info("Stopping {} node controller: {}", IdentifierUtil.productName(), nodeId);
webManager.stop();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
index 8aa25ad..2af06b7 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
@@ -36,6 +36,7 @@
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.dataflow.LSMInsertDeleteOperatorNodePushable;
import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.common.metadata.MetadataConstants;
import org.apache.asterix.common.metadata.MetadataUtil;
import org.apache.asterix.common.metadata.Namespace;
import org.apache.asterix.common.metadata.NamespacePathResolver;
@@ -500,7 +501,7 @@
PrimaryIndexInfo primaryIndexInfo = new PrimaryIndexInfo(dataset, primaryKeyTypes, recordType, metaType,
mergePolicy.first, mergePolicy.second, filterFields, primaryKeyIndexes, primaryKeyIndicators);
Dataverse dataverse = new Dataverse(dataset.getDatabaseName(), dataset.getDataverseName(),
- NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
+ NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP, MetadataConstants.DEFAULT_OWNER);
Namespace namespace = new Namespace(dataverse.getDatabaseName(), dataverse.getDataverseName());
MetadataProvider mdProvider = MetadataProvider.create(
(ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), namespace);
@@ -526,9 +527,9 @@
org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy =
DatasetUtil.getMergePolicyFactory(primaryIndexInfo.dataset, mdTxnCtx);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- Dataverse dataverse =
- new Dataverse(primaryIndexInfo.dataset.getDatabaseName(), primaryIndexInfo.dataset.getDataverseName(),
- NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
+ Dataverse dataverse = new Dataverse(primaryIndexInfo.dataset.getDatabaseName(),
+ primaryIndexInfo.dataset.getDataverseName(), NonTaggedDataFormat.class.getName(),
+ MetadataUtil.PENDING_NO_OP, MetadataConstants.DEFAULT_OWNER);
Namespace namespace = new Namespace(dataverse.getDatabaseName(), dataverse.getDataverseName());
MetadataProvider mdProvider = MetadataProvider.create(
(ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), namespace);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageAzTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageAzTest.java
new file mode 100644
index 0000000..508810d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageAzTest.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.test.cloud_storage;
+
+import static org.apache.asterix.api.common.LocalCloudUtil.CLOUD_STORAGE_BUCKET;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Objects;
+import java.util.Random;
+
+import org.apache.asterix.common.config.GlobalConfig;
+import org.apache.asterix.test.common.TestExecutor;
+import org.apache.asterix.test.runtime.LangExecutionUtil;
+import org.apache.asterix.testframework.context.TestCaseContext;
+import org.apache.asterix.testframework.xml.Description;
+import org.apache.asterix.testframework.xml.TestCase;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.AfterClass;
+import org.junit.Assume;
+import org.junit.BeforeClass;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.MethodSorters;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import com.azure.storage.blob.BlobServiceClient;
+import com.azure.storage.blob.BlobServiceClientBuilder;
+import com.azure.storage.common.StorageSharedKeyCredential;
+
+/**
+ * Run tests in cloud deployment environment
+ */
+@RunWith(Parameterized.class)
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class CloudStorageAzTest {
+
+ private static final Logger LOGGER = LogManager.getLogger();
+
+ private final TestCaseContext tcCtx;
+ private static final String SUITE_TESTS = "testsuite_cloud_storage.xml";
+ private static final String ONLY_TESTS = "testsuite_cloud_storage_only.xml";
+ private static final String CONFIG_FILE_NAME = "src/test/resources/cc-cloud-storage-azblob.conf";
+ private static final String DELTA_RESULT_PATH = "results_cloud";
+ private static final String EXCLUDED_TESTS = "MP";
+
+ public CloudStorageAzTest(TestCaseContext tcCtx) {
+ this.tcCtx = tcCtx;
+ }
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ String endpointString = "http://127.0.0.1:15055/devstoreaccount1/" + CLOUD_STORAGE_BUCKET;
+ final String accKey =
+ "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==";
+ final String accName = "devstoreaccount1";
+
+ BlobServiceClient blobServiceClient = new BlobServiceClientBuilder().endpoint(endpointString)
+ .credential(new StorageSharedKeyCredential(accName, accKey)).buildClient();
+
+ cleanup(blobServiceClient);
+ initialize(blobServiceClient);
+
+ // NOTE(review): the GCS variant closes its Storage client here; Azure's BlobServiceClient has no close() — nothing to release.
+
+ TestExecutor testExecutor = new TestExecutor(DELTA_RESULT_PATH);
+ testExecutor.executorId = "cloud";
+ testExecutor.stripSubstring = "//DB:";
+ LangExecutionUtil.setUp(CONFIG_FILE_NAME, testExecutor);
+ System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, CONFIG_FILE_NAME);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ LangExecutionUtil.tearDown();
+ }
+
+ @Parameters(name = "CloudStorageAzBlobTest {index}: {0}")
+ public static Collection<Object[]> tests() throws Exception {
+ long seed = System.nanoTime();
+ Random random = new Random(seed);
+ LOGGER.info("CloudStorageAzBlobTest seed {}", seed);
+ Collection<Object[]> tests = LangExecutionUtil.tests(ONLY_TESTS, SUITE_TESTS);
+ List<Object[]> selected = new ArrayList<>();
+ for (Object[] test : tests) {
+ if (!Objects.equals(((TestCaseContext) test[0]).getTestGroups()[0].getName(), "sqlpp_queries")) {
+ selected.add(test);
+ }
+ // Select 10% of the tests randomly
+ else if (random.nextInt(10) == 0) {
+ selected.add(test);
+ }
+ }
+ return selected;
+ }
+
+ @Test
+ public void test() throws Exception {
+ List<TestCase.CompilationUnit> cu = tcCtx.getTestCase().getCompilationUnit();
+ Assume.assumeTrue(cu.size() > 1 || !EXCLUDED_TESTS.equals(getText(cu.get(0).getDescription())));
+ LangExecutionUtil.test(tcCtx);
+ }
+
+ private static String getText(Description description) {
+ return description == null ? "" : description.getValue();
+ }
+
+ private static void cleanup(BlobServiceClient blobServiceClient) {
+ blobServiceClient.deleteBlobContainerIfExists(CLOUD_STORAGE_BUCKET);
+ }
+
+ private static void initialize(BlobServiceClient blobServiceClient) {
+ blobServiceClient.createBlobContainerIfNotExists(CLOUD_STORAGE_BUCKET);
+ }
+}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageGCSTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageGCSTest.java
index 3a03445..d62f8f0 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageGCSTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/cloud_storage/CloudStorageGCSTest.java
@@ -21,8 +21,11 @@
import static org.apache.asterix.api.common.LocalCloudUtil.CLOUD_STORAGE_BUCKET;
import static org.apache.asterix.api.common.LocalCloudUtil.MOCK_SERVER_REGION;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
+import java.util.Objects;
+import java.util.Random;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.test.common.TestExecutor;
@@ -36,7 +39,6 @@
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
-import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.MethodSorters;
@@ -55,7 +57,6 @@
*/
@RunWith(Parameterized.class)
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-@Ignore
public class CloudStorageGCSTest {
private static final Logger LOGGER = LogManager.getLogger();
@@ -94,7 +95,21 @@
@Parameters(name = "CloudStorageGCSTest {index}: {0}")
public static Collection<Object[]> tests() throws Exception {
- return LangExecutionUtil.tests(ONLY_TESTS, SUITE_TESTS);
+ long seed = System.nanoTime();
+ Random random = new Random(seed);
+ LOGGER.info("CloudStorageGCSTest seed {}", seed);
+ Collection<Object[]> tests = LangExecutionUtil.tests(ONLY_TESTS, SUITE_TESTS);
+ List<Object[]> selected = new ArrayList<>();
+ for (Object[] test : tests) {
+ if (!Objects.equals(((TestCaseContext) test[0]).getTestGroups()[0].getName(), "sqlpp_queries")) {
+ selected.add(test);
+ }
+ // Select 10% of the tests randomly
+ else if (random.nextInt(10) == 0) {
+ selected.add(test);
+ }
+ }
+ return selected;
}
@Test
diff --git a/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-azblob.conf b/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-azblob.conf
new file mode 100644
index 0000000..d11cb5e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-azblob.conf
@@ -0,0 +1,72 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements. See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership. The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License. You may obtain a copy of the License at
+;
+; http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied. See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[nc/asterix_nc1]
+txn.log.dir=target/tmp/asterix_nc1/txnlog
+core.dump.dir=target/tmp/asterix_nc1/coredump
+iodevices=target/tmp/asterix_nc1/iodevice1
+iodevices=../asterix-server/target/tmp/asterix_nc1/iodevice2
+nc.api.port=19004
+#jvm.args=-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5006
+
+[nc/asterix_nc2]
+ncservice.port=9091
+txn.log.dir=target/tmp/asterix_nc2/txnlog
+core.dump.dir=target/tmp/asterix_nc2/coredump
+iodevices=target/tmp/asterix_nc2/iodevice1,../asterix-server/target/tmp/asterix_nc2/iodevice2
+nc.api.port=19005
+#jvm.args=-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5007
+
+[nc]
+credential.file=src/test/resources/security/passwd
+python.cmd.autolocate=true
+python.env=FOO=BAR=BAZ,BAR=BAZ
+address=127.0.0.1
+command=asterixnc
+app.class=org.apache.asterix.hyracks.bootstrap.NCApplication
+jvm.args=-Xmx4096m -Dnode.Resolver="org.apache.asterix.external.util.IdentitiyResolverFactory"
+storage.buffercache.size=128MB
+storage.memorycomponent.globalbudget=512MB
+
+[cc]
+address = 127.0.0.1
+app.class=org.apache.asterix.hyracks.bootstrap.CCApplication
+heartbeat.period=2000
+heartbeat.max.misses=25
+credential.file=src/test/resources/security/passwd
+
+[common]
+log.dir = logs/
+log.level = INFO
+compiler.framesize=32KB
+compiler.sortmemory=320KB
+compiler.groupmemory=160KB
+compiler.joinmemory=256KB
+compiler.textsearchmemory=160KB
+compiler.windowmemory=192KB
+compiler.internal.sanitycheck=true
+messaging.frame.size=4096
+messaging.frame.count=512
+cloud.deployment=true
+storage.buffercache.pagesize=32KB
+storage.partitioning=static
+cloud.storage.scheme=azblob
+cloud.storage.bucket=cloud-storage-container
+cloud.storage.region=us-east-2
+cloud.storage.endpoint=http://127.0.0.1:15055
+cloud.storage.anonymous.auth=true
+cloud.storage.cache.policy=lazy
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-gcs.conf b/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-gcs.conf
index 3c883a8..0046644 100644
--- a/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-gcs.conf
+++ b/asterixdb/asterix-app/src/test/resources/cc-cloud-storage-gcs.conf
@@ -70,3 +70,6 @@
cloud.storage.endpoint=http://127.0.0.1:4443
cloud.storage.anonymous.auth=true
cloud.storage.cache.policy=selective
+cloud.max.write.requests.per.second=1000
+cloud.max.read.requests.per.second=5000
+cloud.write.buffer.size=5
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.1.ddl.sqlpp
new file mode 100644
index 0000000..ad97753
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.1.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use test;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.2.update.sqlpp
new file mode 100644
index 0000000..bd244d0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.2.update.sqlpp
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.3.query.sqlpp
new file mode 100644
index 0000000..f19b678
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_01/countn_01.3.query.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+
+select element test.array_countn((
+ select element x
+ from [1,2,3,"four"] as x
+));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_02/countn_02.1.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_02/countn_02.1.query.sqlpp
new file mode 100644
index 0000000..261dfa0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_02/countn_02.1.query.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+select value array_countn(x) from [
+{"x":[1,2,3]},
+{"x":[1,2,null]},
+{"x":[1,"two"]},
+{"x":[1,"two", null]},
+{"x":[null]},
+{"x":[]},
+{"a":[1,2,3]}
+] as list
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_distinct/countn_distinct.1.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_distinct/countn_distinct.1.query.sqlpp
new file mode 100644
index 0000000..526c423
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_distinct/countn_distinct.1.query.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ {
+ 't1': array_countn(distinct [10,20,30,10,20,30,10]),
+ 't2': array_countn(distinct [null,missing,null,missing]),
+ 't3': array_countn(distinct [40,null,missing,50,40,null,missing,50,40]),
+ 't4': array_countn(distinct [10,20,30,10,20,"thirty",20]),
+ 't5': array_countn(distinct [])
+ }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.1.ddl.sqlpp
new file mode 100644
index 0000000..0dcc111
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.1.ddl.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests that array_countn aggregation correctly returns 0 for an empty stream,
+ * without an aggregate combiner.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use test;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.2.update.sqlpp
new file mode 100644
index 0000000..de14e51
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.2.update.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests that array_countn aggregation correctly returns 0 for an empty stream,
+ * without an aggregate combiner.
+ * Success : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.3.query.sqlpp
new file mode 100644
index 0000000..3eec594
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_01/countn_empty_01.3.query.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Tests that array_countn aggregation correctly returns 0 for an empty stream,
+ * without an aggregate combiner.
+ * Success : Yes
+ */
+
+select element array_countn((
+ select element x
+ from [1,2,3] as x
+ where (x > 10)
+));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.1.ddl.sqlpp
new file mode 100644
index 0000000..29f79ca
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.1.ddl.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests that array_countn aggregation correctly returns 0 for an empty stream,
+ * with an aggregate combiner.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use test;
+
+
+create type test.TestType as
+ closed {
+ id : bigint,
+ val : double
+};
+
+create dataset Test(TestType) primary key id;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.2.update.sqlpp
new file mode 100644
index 0000000..379d8e8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.2.update.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests that array_countn aggregation correctly returns 0 for an empty stream,
+ * with an aggregate combiner.
+ * Success : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.3.query.sqlpp
new file mode 100644
index 0000000..c9ae713
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_empty_02/countn_empty_02.3.query.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests that array_countn aggregation correctly returns 0 for an empty stream,
+ * with an aggregate combiner.
+ * Success : Yes
+ */
+
+use test;
+
+
+select element test.array_countn((
+ select element x.val
+ from Test as x
+));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.1.ddl.sqlpp
new file mode 100644
index 0000000..6788afc
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.1.ddl.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use test;
+
+
+create type test.NumericType as
+{
+ id : bigint,
+ int8Field : tinyint?,
+ int16Field : smallint?,
+ int32Field : integer?,
+ int64Field : bigint?,
+ floatField : float?,
+ doubleField : double?
+};
+
+create external dataset Numeric(NumericType) using localfs((`path`=`asterix_nc1://data/nontagged/numericData.json`),(`format`=`adm`));
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.2.update.sqlpp
new file mode 100644
index 0000000..bd244d0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.2.update.sqlpp
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.3.query.sqlpp
new file mode 100644
index 0000000..8a84b02
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/countn_null/countn_null.3.query.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+
+{'sql-count':test.array_countn((
+ select element x.doubleField
+ from Numeric as x
+))};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.1.ddl.sqlpp
new file mode 100644
index 0000000..6a5a783
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.1.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of array_countn without nulls.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.2.update.sqlpp
new file mode 100644
index 0000000..f0dcccb
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of array_countn without nulls.
+ * Success : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.3.query.sqlpp
new file mode 100644
index 0000000..dce1b5d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn/scalar_countn.3.query.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of array_countn without nulls.
+ * Success : Yes
+ */
+
+use test;
+
+
+with i8 as test.array_countn([test.tinyint('1'),test.tinyint('2'),test.tinyint('3')]),
+ i16 as test.array_countn([test.smallint('1'),test.smallint('2'),test.smallint('3')]),
+ i32 as test.array_countn([test.integer('1'),test.integer('2'),test.integer('3')]),
+ i64 as test.array_countn([test.bigint('1'),test.bigint('2'),test.bigint('3')]),
+ f as test.array_countn([test.float('1'),test.float('2'),test.float('3')]),
+ d as test.array_countn([test.double('1'),test.double('2'),test.double('3')]),
+ s as test.array_countn(['a','b','c'])
+select element i
+from [i8,i16,i32,i64,f,d,s] as i
+;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.1.ddl.sqlpp
new file mode 100644
index 0000000..b924afc
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.1.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of array_countn with an empty list.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.2.update.sqlpp
new file mode 100644
index 0000000..ee0152f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of array_countn with an empty list.
+ * Success : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.3.query.sqlpp
new file mode 100644
index 0000000..901dda1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_empty/scalar_countn_empty.3.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of array_countn with an empty list.
+ * Success : Yes
+ */
+
+use test;
+
+
+select element test.array_countn([]);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.1.ddl.sqlpp
new file mode 100644
index 0000000..7a939bd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.1.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of array_countn with nulls.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.2.update.sqlpp
new file mode 100644
index 0000000..63eec3e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of array_countn with nulls.
+ * Success : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.3.query.sqlpp
new file mode 100644
index 0000000..0f2c33a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_countn_null/scalar_countn_null.3.query.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of array_countn with nulls.
+ * Success : Yes
+ */
+
+use test;
+
+
+with i8 as test.array_countn([test.tinyint('1'),test.tinyint('2'),test.tinyint('3'),null]),
+ i16 as test.array_countn([test.smallint('1'),test.smallint('2'),test.smallint('3'),null]),
+ i32 as test.array_countn([test.integer('1'),test.integer('2'),test.integer('3'),null]),
+ i64 as test.array_countn([test.bigint('1'),test.bigint('2'),test.bigint('3'),null]),
+ f as test.array_countn([test.float('1'),test.float('2'),test.float('3'),null]),
+ d as test.array_countn([test.double('1'),test.double('2'),test.double('3'),null]),
+ s as test.array_countn(['a','b','c',null]),
+ na as test.array_countn([null]),
+ n as test.array_countn(null),
+ m as test.array_countn(missing)
+select element i
+from [i8,i16,i32,i64,f,d,s,na,n,m] as i
+;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.1.ddl.sqlpp
new file mode 100644
index 0000000..ad97753
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.1.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use test;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.2.update.sqlpp
new file mode 100644
index 0000000..bd244d0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.2.update.sqlpp
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.3.query.sqlpp
new file mode 100644
index 0000000..31e7b07
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_01/countn_01.3.query.sqlpp
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+select element test.strict_countn((
+ select element x
+ from [1,2,3,"four"] as x
+));
+
+select element test.countn(x) from [
+{"x":1},{"x":2},{"x":3},{"x":"four"},{"x":null},{"x":[1,2,3]},{"x":"1"},{"x":{"k":1}}
+] as list;
+
+select n, test.countn(x) from [
+ {"x":1,"n":"a"},
+ {"x":2,"n":"a"},
+ {"x":3,"n":"b"},
+ {"x":"four","n":"a"},
+ {"x":null,"n":"b"},
+ {"x":[1,2,3],"n":"a"},
+ {"x":"1","n":"b"},
+ {"x":{"k":1},"n":"b"},
+ {"n":"b"}
+] as list group by n;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_02/countn_02.1.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_02/countn_02.1.query.sqlpp
new file mode 100644
index 0000000..4a10509
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_02/countn_02.1.query.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+select value strict_countn(x) from [
+{"x":[1,2,3]},
+{"x":[1,2,null]},
+{"x":[1,"two"]},
+{"x":[1,"two", null]},
+{"x":[null]},
+{"x":[]},
+{"a":[1,2,3]}
+] as list
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_distinct/countn_distinct.1.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_distinct/countn_distinct.1.query.sqlpp
new file mode 100644
index 0000000..699d182
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_distinct/countn_distinct.1.query.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ {
+ 't1': strict_countn(distinct [10,20,30,10,20,30,10]),
+ 't2': strict_countn(distinct [null,missing,null,missing]),
+ 't3': strict_countn(distinct [40,null,missing,50,40,null,missing,50,40]),
+ 't4': strict_countn(distinct [10,20,30,10,20,"thirty",20]),
+ 't5': strict_countn(distinct [])
+ }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.1.ddl.sqlpp
new file mode 100644
index 0000000..55cfa23
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.1.ddl.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests that countn aggregation correctly returns 0 for an empty stream,
+ * without an aggregate combiner.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use test;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.2.update.sqlpp
new file mode 100644
index 0000000..dcb559a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.2.update.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests that countn aggregation correctly returns 0 for an empty stream,
+ * without an aggregate combiner.
+ * Success : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.3.query.sqlpp
new file mode 100644
index 0000000..9228733
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_01/countn_empty_01.3.query.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests that countn aggregation correctly returns 0 for an empty stream,
+ * without an aggregate combiner.
+ * Success : Yes
+ */
+
+select element strict_countn((
+ select element x
+ from [1,2,3] as x
+ where (x > 10)
+));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.1.ddl.sqlpp
new file mode 100644
index 0000000..215fd1b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.1.ddl.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests that countn aggregation correctly returns 0 for an empty stream,
+ * with an aggregate combiner.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use test;
+
+
+create type test.TestType as
+ closed {
+ id : integer,
+ val : double
+};
+
+create dataset Test(TestType) primary key id;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.2.update.sqlpp
new file mode 100644
index 0000000..d8ebd7f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.2.update.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests that countn aggregation correctly returns 0 for an empty stream,
+ * with an aggregate combiner.
+ * Success : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.3.query.sqlpp
new file mode 100644
index 0000000..2844438
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_empty_02/countn_empty_02.3.query.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Tests that countn aggregation correctly returns 0 for an empty stream,
+ * with an aggregate combiner.
+ * Success : Yes
+ */
+
+use test;
+
+
+select element test.strict_countn((
+ select element x.val
+ from Test as x
+));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.1.ddl.sqlpp
new file mode 100644
index 0000000..3f2c5bf
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.1.ddl.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use test;
+
+
+create type test.NumericType as
+{
+ id : integer,
+ int8Field : tinyint?,
+ int16Field : smallint?,
+ int32Field : integer?,
+ int64Field : bigint?,
+ floatField : float?,
+ doubleField : double?
+};
+
+create external dataset Numeric(NumericType) using localfs((`path`=`asterix_nc1://data/nontagged/numericData.json`),(`format`=`adm`));
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.2.update.sqlpp
new file mode 100644
index 0000000..bd244d0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.2.update.sqlpp
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.3.query.sqlpp
new file mode 100644
index 0000000..d5a60c4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/countn_null/countn_null.3.query.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+
+{'count':test.strict_countn((
+ select element x.doubleField
+ from Numeric as x
+))};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.1.ddl.sqlpp
new file mode 100644
index 0000000..1172330
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.1.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Tests the scalar version of countn without nulls.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.2.update.sqlpp
new file mode 100644
index 0000000..411b984
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Tests the scalar version of countn without nulls.
+ * Success : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.3.query.sqlpp
new file mode 100644
index 0000000..dd1b016
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn/scalar_countn.3.query.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Tests the scalar version of countn without nulls.
+ * Success : Yes
+ */
+
+use test;
+
+
+with i8 as test.strict_countn([test.tinyint('1'),test.tinyint('2'),test.tinyint('3')]),
+ i16 as test.strict_countn([test.smallint('1'),test.smallint('2'),test.smallint('3')]),
+ i32 as test.strict_countn([test.integer('1'),test.integer('2'),test.integer('3')]),
+ i64 as test.strict_countn([test.bigint('1'),test.bigint('2'),test.bigint('3')]),
+ f as test.strict_countn([test.float('1'),test.float('2'),test.float('3')]),
+ d as test.strict_countn([test.double('1'),test.double('2'),test.double('3')]),
+ s as test.strict_countn(['a','b','c'])
+select element i
+from [i8,i16,i32,i64,f,d,s] as i
+;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.1.ddl.sqlpp
new file mode 100644
index 0000000..bbcfff8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.1.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of countn with an empty list.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.2.update.sqlpp
new file mode 100644
index 0000000..e6cd404
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of countn with an empty list.
+ * Success : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.3.query.sqlpp
new file mode 100644
index 0000000..201aef3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_empty/scalar_countn_empty.3.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of countn with an empty list.
+ * Success : Yes
+ */
+
+use test;
+
+
+select element test.strict_countn([]);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.1.ddl.sqlpp
new file mode 100644
index 0000000..cb7c70c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.1.ddl.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of countn with nulls.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.2.update.sqlpp
new file mode 100644
index 0000000..b7da5af
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of countn with nulls.
+ * Success : Yes
+ */
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.3.query.sqlpp
new file mode 100644
index 0000000..393d16e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_countn_null/scalar_countn_null.3.query.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Tests the scalar version of countn with nulls.
+ * Success : Yes
+ */
+
+use test;
+
+
+with i8 as test.strict_countn([test.tinyint('1'),test.tinyint('2'),test.tinyint('3'),null]),
+ i16 as test.strict_countn([test.smallint('1'),test.smallint('2'),test.smallint('3'),null]),
+ i32 as test.strict_countn([test.integer('1'),test.integer('2'),test.integer('3'),null]),
+ i64 as test.strict_countn([test.bigint('1'),test.bigint('2'),test.bigint('3'),null]),
+ f as test.strict_countn([test.float('1'),test.float('2'),test.float('3'),null]),
+ d as test.strict_countn([test.double('1'),test.double('2'),test.double('3'),null]),
+ s as test.strict_countn(['a','b','c',null]),
+ na as test.strict_countn([null]),
+ n as test.strict_countn(null),
+ m as test.strict_countn(missing)
+select element i
+from [i8,i16,i32,i64,f,d,s,na,n,m] as i
+;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.1.ddl.sqlpp
new file mode 100755
index 0000000..8103562
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.1.ddl.sqlpp
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+CREATE TYPE openType AS {id: int};
+CREATE DATASET ds(openType) primary key id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.2.update.sqlpp
new file mode 100755
index 0000000..a9171ce
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.2.update.sqlpp
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+UPSERT INTO ds {"id": 1, "arr": [0, 1, 2, 3, 4, 5]};
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.3.query.sqlpp
new file mode 100755
index 0000000..91a479a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.3.query.sqlpp
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+with arr as [0, 1, 2, 3, 4, 5] select value
+{
+ "t1": (arr[2]),
+ "t2": (arr[2:4]),
+ "t3": (arr[2:10]),
+ "t4": (arr[-7:4]),
+ "t5": (arr[-7:10]),
+ "t6": (arr[-3:4]),
+ "t7": (arr[2:-1]),
+ "t8": (arr[2:]),
+ "t9": (arr[10:]),
+ "t10": (arr[-7:]),
+ "t11": (arr[:]),
+ "t12": (arr[0]),
+ "t13": (arr[:4]),
+ "t14": (arr[:10]),
+ "t15": (arr[:-7]),
+ "t16": ([][0:]),
+ "t17": ([][:0]),
+ "t18": (select value ds.arr[1:3] from ds),
+ "t19": (select value ds.arr[1:] from ds),
+ "t20": (select value ds.arr[:3] from ds),
+ "t21": (select value ds.arr[] from ds),
+ "t22": (select value ds.arr[:] from ds),
+    "t23": (select value ds.arr[:10] from ds)
+};
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.4.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.4.ddl.sqlpp
new file mode 100755
index 0000000..43604c6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.4.ddl.sqlpp
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATASET ds;
+DROP TYPE openType;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.01.ddl.sqlpp
new file mode 100644
index 0000000..36d00be
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.01.ddl.sqlpp
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+
+CREATE TYPE ColumnType1 AS {
+ id: integer
+};
+
+CREATE COLLECTION TestCollection(ColumnType1) PRIMARY KEY id;
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.02.update.sqlpp
new file mode 100644
index 0000000..b6ec758
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.02.update.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks2")
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.03.update.sqlpp
new file mode 100644
index 0000000..ce68cbd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.03.update.sqlpp
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+insert into TestCollection({"id":1, "name": "John", "nested" : { "first" : "john" , "second":"JOHN" } });
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks3")
+TYPE ( {id : string, name : string, nested: { first : string, second : string} }
+)
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.04.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.04.update.sqlpp
new file mode 100644
index 0000000..5e8d730
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.04.update.sqlpp
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks4")
+TYPE ( {id : bigint,name : string, nested: { first : string } } )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.05.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.05.update.sqlpp
new file mode 100644
index 0000000..cda2519
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.05.update.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks5")
+TYPE ( { id : bigint, name : string, nested : string } )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.06.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.06.update.sqlpp
new file mode 100644
index 0000000..a6db11d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.06.update.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks6")
+TYPE ( {id : bigint,name : { first : string },nested:{first : string,second : string} } )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.07.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.07.update.sqlpp
new file mode 100644
index 0000000..851559a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.07.update.sqlpp
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks7")
+TYPE ( {id:int} )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet",
+ "row-group-size":"random"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.08.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.08.update.sqlpp
new file mode 100644
index 0000000..2f356fb
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.08.update.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks8")
+TYPE ( {id:int} )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet",
+ "page-size":"random"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.09.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.09.update.sqlpp
new file mode 100644
index 0000000..f2293e3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.09.update.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks9")
+TYPE ( { name:string } )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet",
+ "compression":"rar"
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.10.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.10.update.sqlpp
new file mode 100644
index 0000000..4f52164
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.10.update.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks10")
+TYPE ( { name: } )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet",
+ "schema":"message schema{"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.11.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.11.update.sqlpp
new file mode 100644
index 0000000..ca2cfc1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.11.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks11")
+TYPE ( { id : int , name : binary } )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.12.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.12.update.sqlpp
new file mode 100644
index 0000000..86e55e5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.12.update.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks12")
+TYPE ( { id : int, name : string } )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet",
+ "version" : 3
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.13.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.13.update.sqlpp
new file mode 100644
index 0000000..281610b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.13.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks13")
+TYPE ( { id : int, name : string, list : [int,string]} )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.14.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.14.update.sqlpp
new file mode 100644
index 0000000..1d1c6af
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.14.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks14")
+TYPE ( {id : int, name : string, list : [int |} )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.15.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.15.update.sqlpp
new file mode 100644
index 0000000..a26624a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/negative/parquet-error-checks/parquet-error-checks.15.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-error-checks15")
+TYPE ( {id : int, name : string, list : [int] )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+}
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.01.ddl.sqlpp
new file mode 100644
index 0000000..56e79c8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.01.ddl.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+
+CREATE TYPE ColumnType1 AS {
+ id: integer,
+ name : string
+};
+
+CREATE COLLECTION TestCollection(ColumnType1) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.02.update.sqlpp
new file mode 100644
index 0000000..ec1ac0c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.02.update.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : create a dataset using year-month-duration as the primary key
+ * Expected Res : Success
+ * Date : 7 May 2013
+ * Issue : 363
+ */
+
+use test;
+/*
+insert into TestCollection({"id":`year-month-duration`("P16Y"), "name": "John"});
+insert into TestCollection({"id":`day-time-duration`("-P3829H849.392S"), "name": "Alex"});
+*/
+
+insert into TestCollection({"id":18, "name": "Virat" , "dateType":date("1988-11-05"), "timeType": time("03:10:00.493Z") , "boolType" : false , "doubleType" : 0.75, "datetimeType" : datetime("1900-02-01T00:00:00") });
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.03.update.sqlpp
new file mode 100644
index 0000000..0012e22
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.03.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+ select c.* from TestCollection c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-cover-data-types")
+TYPE ( { name : string, id : int, dateType : date, timeType : time, boolType : boolean, doubleType : double, datetimeType : datetime } )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+};
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.04.ddl.sqlpp
new file mode 100644
index 0000000..310198e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.04.ddl.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+CREATE TYPE ColumnType2 AS {
+};
+
+
+
+CREATE EXTERNAL DATASET TestDataset(ColumnType2) USING S3
+(
+ ("serviceEndpoint"="http://127.0.0.1:8001"),
+ ("region"="us-west-2"),
+ ("container"="playground"),
+ ("definition"="copy-to-result/parquet-cover-data-types/"),
+ ("include"="*.parquet"),
+ ("requireVersionChangeDetection"="false"),
+ ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.05.query.sqlpp
new file mode 100644
index 0000000..b03fc5e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-cover-data-types/parquet-cover-data-types.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM TestDataset c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.01.ddl.sqlpp
new file mode 100644
index 0000000..6be9489
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.01.ddl.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+
+CREATE TYPE ColumnType1 AS {
+ id: integer
+};
+
+CREATE COLLECTION TestCollection(ColumnType1) PRIMARY KEY id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.02.update.sqlpp
new file mode 100644
index 0000000..d569ee5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.02.update.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+INSERT INTO TestCollection {"id":2,"name":{"first":["power","star"]}};
+INSERT INTO TestCollection {"id":5,"name":{"first":[]}};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.03.update.sqlpp
new file mode 100644
index 0000000..9a652d2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.03.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+select c.* from TestCollection c
+ ) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-empty-array")
+TYPE ( { id : int, name : { first : [ string ] } } )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+ };
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.04.ddl.sqlpp
new file mode 100644
index 0000000..f17c4d4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.04.ddl.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+CREATE TYPE ColumnType2 AS {
+ };
+
+
+
+CREATE EXTERNAL DATASET TestDataset(ColumnType2) USING S3
+(
+ ("serviceEndpoint"="http://127.0.0.1:8001"),
+ ("region"="us-west-2"),
+ ("container"="playground"),
+ ("definition"="copy-to-result/parquet-empty-array/"),
+ ("include"="*.parquet"),
+ ("requireVersionChangeDetection"="false"),
+ ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.05.query.sqlpp
new file mode 100644
index 0000000..b03fc5e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-empty-array/parquet-empty-array.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM TestDataset c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.01.ddl.sqlpp
new file mode 100644
index 0000000..76970a5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.01.ddl.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+CREATE TYPE ColumnType2 AS {
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.02.update.sqlpp
new file mode 100644
index 0000000..4bec537
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.02.update.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+COPY (
+ select "123" as id
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-simple")
+TYPE ( {id:string} )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet",
+ "version" : "2"
+};
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.03.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.03.ddl.sqlpp
new file mode 100644
index 0000000..a5d7789
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.03.ddl.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+CREATE EXTERNAL DATASET DatasetCopy(ColumnType2) USING S3
+(
+("accessKeyId"="dummyAccessKey"),
+("secretAccessKey"="dummySecretKey"),
+("sessionToken"="dummySessionToken"),
+("region"="us-west-2"),
+ ("serviceEndpoint"="http://127.0.0.1:8001"),
+ ("container"="playground"),
+ ("definition"="copy-to-result/parquet-simple"),
+ ("format" = "parquet"),
+ ("requireVersionChangeDetection"="false"),
+ ("include"="*.parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.04.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.04.query.sqlpp
new file mode 100644
index 0000000..5aeedb8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-simple/parquet-simple.04.query.sqlpp
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT id
+FROM DatasetCopy c
+ORDER BY c.id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.01.ddl.sqlpp
new file mode 100644
index 0000000..f890e0d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.01.ddl.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+CREATE TYPE ColumnType1 AS {
+ id: string
+};
+
+CREATE DATASET DummyTweetDataset(ColumnType1)
+PRIMARY KEY id WITH {
+ "storage-format": {"format" : "column"}
+};
+
+
+CREATE TYPE ColumnType2 AS {
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.02.update.sqlpp
new file mode 100644
index 0000000..83a1140
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.02.update.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+LOAD DATASET DummyTweetDataset USING localfs
+(
+ ("path" = "asterix_nc1://data/hdfs/parquet/dummy_tweet.json"),
+ ("format" = "json")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.03.update.sqlpp
new file mode 100644
index 0000000..aed4e09
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.03.update.sqlpp
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+ SELECT c.* FROM DummyTweetDataset c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-tweet")
+TYPE ( {
+ coordinates: {
+ coordinates: [
+ double
+ ],
+ `type` : string
+ },
+ created_at: string,
+ entities: {
+ urls: [
+ {
+ display_url: string,
+ expanded_url: string,
+ indices: [
+ int
+ ],
+ url: string
+ }
+ ],
+ user_mentions: [
+ {
+ id: int,
+ id_str: string,
+ indices: [
+ int
+ ],
+ name: string,
+ screen_name: string
+ }
+ ]
+ },
+ favorite_count: int,
+ favorited: boolean,
+ filter_level: string,
+ geo: {
+ coordinates: [
+ double
+ ],
+ `type`: string
+ },
+ id: string,
+ id_str: string,
+ in_reply_to_screen_name: string,
+ in_reply_to_status_id: int,
+ in_reply_to_status_id_str: string,
+ in_reply_to_user_id: int,
+ in_reply_to_user_id_str: string,
+ is_quote_status: boolean,
+ lang: string,
+ place: {
+ bounding_box: {
+ coordinates: [
+ [
+ [
+ double
+ ]
+ ]
+ ],
+ `type`: string
+ },
+ country: string,
+ country_code: string,
+ full_name: string,
+ id: string,
+ name: string,
+ place_type: string,
+ url: string
+ },
+ possibly_sensitive: boolean,
+ quoted_status: {
+ created_at: string,
+ entities: {
+ user_mentions: [
+ {
+ id: int,
+ id_str: string,
+ indices: [
+ int
+ ],
+ name: string,
+ screen_name: string
+ }
+ ]
+ },
+ favorite_count: int,
+ favorited: boolean,
+ filter_level: string,
+ id: int,
+ id_str: string,
+ in_reply_to_screen_name: string,
+ in_reply_to_status_id: int,
+ in_reply_to_status_id_str: string,
+ in_reply_to_user_id: int,
+ in_reply_to_user_id_str: string,
+ is_quote_status: boolean,
+ lang: string,
+ retweet_count: int,
+ retweeted: boolean,
+ source: string,
+ text: string,
+ truncated: boolean,
+ user: {
+ contributors_enabled: boolean,
+ created_at: string,
+ default_profile: boolean,
+ default_profile_image: boolean,
+ description: string,
+ favourites_count: int,
+ followers_count: int,
+ friends_count: int,
+ geo_enabled: boolean,
+ id: int,
+ id_str: string,
+ is_translator: boolean,
+ lang: string,
+ listed_count: int,
+ name: string,
+ profile_background_color: string,
+ profile_background_image_url: string,
+ profile_background_image_url_https: string,
+ profile_background_tile: boolean,
+ profile_banner_url: string,
+ profile_image_url: string,
+ profile_image_url_https: string,
+ profile_link_color: string,
+ profile_sidebar_border_color: string,
+ profile_sidebar_fill_color: string,
+ profile_text_color: string,
+ profile_use_background_image: boolean,
+ protected: boolean,
+ screen_name: string,
+ statuses_count: int,
+ verified: boolean
+ }
+ },
+ quoted_status_id: int,
+ quoted_status_id_str: string,
+ retweet_count: int,
+ retweeted: boolean,
+ source: string,
+ text: string,
+ timestamp_ms: string,
+ truncated: boolean,
+ user: {
+ contributors_enabled: boolean,
+ created_at: string,
+ default_profile: boolean,
+ default_profile_image: boolean,
+ description: string,
+ favourites_count: int,
+ followers_count: int,
+ friends_count: int,
+ geo_enabled: boolean,
+ id: int,
+ id_str: string,
+ is_translator: boolean,
+ lang: string,
+ listed_count: int,
+ location: string,
+ name: string,
+ profile_background_color: string,
+ profile_background_image_url: string,
+ profile_background_image_url_https: string,
+ profile_background_tile: boolean,
+ profile_banner_url: string,
+ profile_image_url: string,
+ profile_image_url_https: string,
+ profile_link_color: string,
+ profile_sidebar_border_color: string,
+ profile_sidebar_fill_color: string,
+ profile_text_color: string,
+ profile_use_background_image: boolean,
+ protected: boolean,
+ screen_name: string,
+ statuses_count: int,
+ time_zone: string,
+ url: string,
+ utc_offset: int,
+ verified: boolean
+ }
+ } )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+};
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.04.ddl.sqlpp
new file mode 100644
index 0000000..1cf0c78
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.04.ddl.sqlpp
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+
+
+CREATE EXTERNAL DATASET DummyTweetDatasetCopy(ColumnType2) USING S3
+(
+ ("serviceEndpoint"="http://127.0.0.1:8001"),
+ ("region"="us-west-2"),
+ ("container"="playground"),
+ ("definition"="copy-to-result/parquet-tweet/"),
+ ("include"="*.parquet"),
+ ("requireVersionChangeDetection"="false"),
+ ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.05.query.sqlpp
new file mode 100644
index 0000000..13587f6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-tweet/parquet-tweet.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM DummyTweetDatasetCopy c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.01.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.01.ddl.sqlpp
new file mode 100644
index 0000000..dfc64ce
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.01.ddl.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test if exists;
+CREATE DATAVERSE test;
+USE test;
+
+CREATE TYPE ColumnType1 AS {
+ id: int
+};
+
+CREATE DATASET NameCommentDataset(ColumnType1)
+PRIMARY KEY id WITH {
+ "storage-format": {"format" : "column"}
+};
+
+
+CREATE TYPE ColumnType2 AS {
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.02.update.sqlpp
new file mode 100644
index 0000000..8591369
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.02.update.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+LOAD DATASET NameCommentDataset USING localfs
+(
+ ("path" = "asterix_nc1://data/hdfs/parquet/id_name_comment.json"),
+ ("format" = "json")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.03.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.03.update.sqlpp
new file mode 100644
index 0000000..9a1c9a4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.03.update.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+COPY (
+ SELECT c.* FROM NameCommentDataset c
+) toWriter
+TO S3
+PATH ("copy-to-result", "parquet-utf8")
+TYPE ( { comment:string, id:bigint, name:string } )
+WITH {
+ "accessKeyId":"dummyAccessKey",
+ "secretAccessKey":"dummySecretKey",
+ "region":"us-west-2",
+ "serviceEndpoint":"http://127.0.0.1:8001",
+ "container":"playground",
+ "format":"parquet"
+};
+
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.04.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.04.ddl.sqlpp
new file mode 100644
index 0000000..4fd41f6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.04.ddl.sqlpp
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+
+
+CREATE EXTERNAL DATASET NameCommentDatasetCopy(ColumnType2) USING S3
+(
+ ("serviceEndpoint"="http://127.0.0.1:8001"),
+ ("region"="us-west-2"),
+ ("container"="playground"),
+ ("definition"="copy-to-result/parquet-utf8/"),
+ ("include"="*.parquet"),
+ ("requireVersionChangeDetection"="false"),
+ ("format" = "parquet")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.05.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.05.query.sqlpp
new file mode 100644
index 0000000..17cd027
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/copy-to/parquet-utf8/parquet-utf8.05.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE test;
+
+
+SELECT c.*
+FROM NameCommentDatasetCopy c
+ORDER BY c.id;
+
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/datatype/primitive.02.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/datatype/primitive.02.update.sqlpp
index 3ab3bc6..6bc1564 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/datatype/primitive.02.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/datatype/primitive.02.update.sqlpp
@@ -27,7 +27,13 @@
{"id": 129, "myGeometry": st_geom_from_geojson({"type": "MultiPolygon","coordinates": [[[[40, 40], [20, 45], [45, 30], [40, 40]]],[[[20, 35], [10, 30], [10, 10], [30, 5], [45, 20], [20, 35]],[[30, 20], [20, 15], [20, 25], [30, 20]]]]})},
{"id": 130, "myGeometry": st_make_point(-71.1043443253471, 42.3150676015829)},
{"id": 131, "myGeometry": st_make_point(1.0,2.0,3.0)},
-{"id": 132, "myGeometry": st_make_point(1.0,2.0,3.0,4.0)},
+/* The query below is intentionally commented out because the JTS WKBWriter does not support 4-dimensional coordinates (https://github.com/locationtech/jts/issues/733). If it is uncommented, add the assertions below to the files mentioned:
+line 9 at result.03.adm should have the result { "Geometries": { "id": 132, "myGeometry": {"type":"Point","coordinates":[1,2,3,4]} } }
+line 9 at result.04.adm should have the result {{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 4, "Dimension": 0, "NPoints": 1, "XMax": 1.0, "XMin": 1.0, "YMax": 2.0, "YMin": 2.0, "Binary": hex("01B90B0000000000000000F03F000000000000004000000000000008400000000000001040"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[1,2,3,4]}", "WKT": "POINT ZM (1 2 3 4)" }
+line 4 at result.05.adm should have the result { "X": 1.0, "Y": 2.0, "Z": 3.0, "M": 4.0 }
+line 9 at result.09.adm should have the result { "IsClosed": true, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
+*/
+/* {"id": 132, "myGeometry": st_make_point(1.0,2.0,3.0,4.0)}, */
{"id": 133, "myGeometry": st_geom_from_text('POLYGON((743238 2967416,743238 2967450,743265 2967450,743265.625 2967416,743238 2967416))')},
{"id": 134, "myGeometry": st_geom_from_wkb(hex("0102000000020000001F85EB51B87E5CC0D34D621058994340105839B4C87E5CC0295C8FC2F5984340"))},
{"id": 135, "myGeometry": st_line_from_multipoint(st_geom_from_text('MULTIPOINT(1 2 , 4 5 , 7 8 )'))},
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/index/index.19.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/index/index.19.update.sqlpp
index bdfef62..02d5064 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/index/index.19.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/index/index.19.update.sqlpp
@@ -27,7 +27,11 @@
{"id": 129, "myGeometry": st_geom_from_geojson({"type": "MultiPolygon","coordinates": [[[[40, 40], [20, 45], [45, 30], [40, 40]]],[[[20, 35], [10, 30], [10, 10], [30, 5], [45, 20], [20, 35]],[[30, 20], [20, 15], [20, 25], [30, 20]]]]})},
{"id": 130, "myGeometry": st_make_point(-71.1043443253471, 42.3150676015829)},
{"id": 131, "myGeometry": st_make_point(1.0,2.0,3.0)},
-{"id": 132, "myGeometry": st_make_point(1.0,2.0,3.0,4.0)},
+/* The query below is intentionally commented out because the JTS WKBWriter does not support 4-dimensional coordinates (https://github.com/locationtech/jts/issues/733). If it is uncommented, add the assertions below to the files mentioned:
+line 9 at result.20.adm should have the result { "Geometries": { "id": 132, "myGeometry": {"type":"Point","coordinates":[1,2,3,4]} } }
+line 2 at result.21.adm should have the result 132
+*/
+/* {"id": 132, "myGeometry": st_make_point(1.0,2.0,3.0,4.0)}, */
{"id": 133, "myGeometry": st_geom_from_text('POLYGON((743238 2967416,743238 2967450,743265 2967450,743265.625 2967416,743238 2967416))')},
{"id": 134, "myGeometry": st_geom_from_wkb(hex("0102000000020000001F85EB51B87E5CC0D34D621058994340105839B4C87E5CC0295C8FC2F5984340"))},
{"id": 135, "myGeometry": st_line_from_multipoint(st_geom_from_text('MULTIPOINT(1 2 , 4 5 , 7 8 )'))},
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/single-method/predicate.15.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/single-method/predicate.15.query.sqlpp
new file mode 100644
index 0000000..c1637ce
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/geojson/single-method/predicate.15.query.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+st_z_max(st_geom_from_text("POLYGON EMPTY"));
+st_z_min(st_geom_from_text("POLYGON EMPTY"));
+st_z_max(st_geom_from_text("POLYGON Z ((0 0 0, 1 1 0, 1 -1 0, 0 0 0))"));
+st_z_min(st_geom_from_text("POLYGON Z ((0 0 0, 1 1 0, 1 -1 0, 0 0 0))"));
+st_z_max(st_geom_from_text("POLYGON Z ((0 0 1, 1 1 2, 1 -1 3, 0 0 1))"));
+st_z_min(st_geom_from_text("POLYGON Z ((0 0 1, 1 1 2, 1 -1 3, 0 0 1))"));
+st_z_max(st_geom_from_text("POLYGON Z ((0 0 -1, 1 1 -2, 1 -1 -3, 0 0 -1))"));
+st_z_min(st_geom_from_text("POLYGON Z ((0 0 -1, 1 1 -2, 1 -1 -3, 0 0 -1))"));
+st_z_max(st_geom_from_text("POLYGON Z ((0 0 -1, 1 1 0, 1 -1 1, 0 0 -1))"));
+st_z_min(st_geom_from_text("POLYGON Z ((0 0 -1, 1 1 0, 1 -1 1, 0 0 -1))"));
+st_z_max(st_geom_from_text("POLYGON Z ((0 0 0, 1 1 0, 1 -1 2, 0 0 0))"));
+st_z_min(st_geom_from_text("POLYGON Z ((0 0 0, 1 1 0, 1 -1 2, 0 0 0))"));
+st_z_max(st_geom_from_text("POLYGON Z ((0 0 0, 1 1 -1, 1 -1 0, 0 0 0))"));
+st_z_min(st_geom_from_text("POLYGON Z ((0 0 0, 1 1 -1, 1 -1 0, 0 0 0))"));
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.1.ddl.sqlpp
new file mode 100644
index 0000000..4271f99
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.1.ddl.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Description: This test case is to verify the fix for ASTERIXDB-3410
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use test;
+
+create type dt1 as {itemid: int};
+
+create dataset collection1(dt1) PRIMARY KEY itemid;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.2.update.sqlpp
new file mode 100644
index 0000000..d805bd0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.2.update.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use test;
+
+insert into collection1
+([
+ {
+ "categories": "Category 1, Category 2, Category 6, Category 9, Category 10, Category 12, Category 13, Category 14, Category 16, Category 17, Category 18, Category 19, Category 20, Category 21, Category 22",
+ "itemid":10,
+ "description":"ABC"
+ },
+ {
+ "categories": "Category 1, Category 3, Category 5, Category 7, Category 8, Category 9, Category 10, Category 19, Category 10, Category 20, Category 21, Category 22, Category 23, Category 31, Category 32",
+ "itemid":12,
+ "description":"XYZ"
+ }
+]);
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.3.query.sqlpp
new file mode 100644
index 0000000..9081a11
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.3.query.sqlpp
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Description: This test case is to verify the fix for ASTERIXDB-3410
+ */
+
+use test;
+
+SELECT VALUE OBJECT_REMOVE(t, 'categories')
+FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 1")) AS `Category 1`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 2")) AS `Category 2`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 3")) AS `Category 3`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 4")) AS `Category 4`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 5")) AS `Category 5`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 6")) AS `Category 6`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 7")) AS `Category 7`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 8")) AS `Category 8`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 9")) AS `Category 9`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 10")) AS `Category 10`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 11")) AS `Category 11`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 12")) AS `Category 12`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 13")) AS `Category 13`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 14")) AS `Category 14`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 15")) AS `Category 15`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 16")) AS `Category 16`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 17")) AS `Category 17`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 18")) AS `Category 18`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 19")) AS `Category 19`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 20")) AS `Category 20`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 21")) AS `Category 21`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 22")) AS `Category 22`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 23")) AS `Category 23`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 24")) AS `Category 24`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 25")) AS `Category 25`
+ FROM (
+ SELECT t.*,
+ to_bigint(CONTAINS(categories, "Category 26")) AS `Category 26`
+ FROM collection1 t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ) AS t ORDER BY itemid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_01/countn_01.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_01/countn_01.1.adm
new file mode 100644
index 0000000..00750ed
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_01/countn_01.1.adm
@@ -0,0 +1 @@
+3
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_02/countn_02.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_02/countn_02.1.adm
new file mode 100644
index 0000000..565b903
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_02/countn_02.1.adm
@@ -0,0 +1,7 @@
+3
+2
+1
+1
+0
+0
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_distinct/countn_distinct.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_distinct/countn_distinct.1.adm
new file mode 100644
index 0000000..c1f0a1c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_distinct/countn_distinct.1.adm
@@ -0,0 +1 @@
+{ "t1": 3, "t2": 0, "t3": 2, "t4": 3, "t5": 0 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_empty_01/countn_empty_01.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_empty_01/countn_empty_01.1.adm
new file mode 100644
index 0000000..c227083
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_empty_01/countn_empty_01.1.adm
@@ -0,0 +1 @@
+0
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_empty_02/countn_empty_02.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_empty_02/countn_empty_02.1.adm
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_empty_02/countn_empty_02.1.adm
@@ -0,0 +1 @@
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_null/countn_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_null/countn_null.1.adm
new file mode 100644
index 0000000..3c4c7ec
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/countn_null/countn_null.1.adm
@@ -0,0 +1 @@
+{ "sql-count": 3 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn/scalar_count.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn/scalar_count.1.adm
new file mode 100644
index 0000000..3230e04
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn/scalar_count.1.adm
@@ -0,0 +1,7 @@
+3
+3
+3
+3
+3
+3
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn_empty/scalar_countn_empty.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn_empty/scalar_countn_empty.1.adm
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn_empty/scalar_countn_empty.1.adm
@@ -0,0 +1 @@
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn_null/scalar_countn_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn_null/scalar_countn_null.1.adm
new file mode 100644
index 0000000..b4f89e0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/scalar_countn_null/scalar_countn_null.1.adm
@@ -0,0 +1,10 @@
+3
+3
+3
+3
+3
+3
+0
+0
+0
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_01/countn_01.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_01/countn_01.1.adm
new file mode 100644
index 0000000..1d6b377
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_01/countn_01.1.adm
@@ -0,0 +1,4 @@
+3
+3
+{ "$1": 2, "n": "a" }
+{ "$1": 1, "n": "b" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_02/countn_02.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_02/countn_02.1.adm
new file mode 100644
index 0000000..65ede1c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_02/countn_02.1.adm
@@ -0,0 +1,7 @@
+3
+null
+1
+null
+null
+0
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_distinct/countn_distinct.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_distinct/countn_distinct.1.adm
new file mode 100644
index 0000000..ae0b195
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_distinct/countn_distinct.1.adm
@@ -0,0 +1 @@
+{ "t1": 3, "t2": null, "t3": null, "t4": 3, "t5": 0 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_empty_01/countn_empty_01.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_empty_01/countn_empty_01.1.adm
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_empty_01/countn_empty_01.1.adm
@@ -0,0 +1 @@
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_empty_02/countn_empty_02.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_empty_02/countn_empty_02.1.adm
new file mode 100644
index 0000000..c227083
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_empty_02/countn_empty_02.1.adm
@@ -0,0 +1 @@
+0
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_null/countn_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_null/countn_null.1.adm
new file mode 100644
index 0000000..5fb8b09
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/countn_null/countn_null.1.adm
@@ -0,0 +1 @@
+{ "count": null }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn/scalar_countn.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn/scalar_countn.1.adm
new file mode 100644
index 0000000..3230e04
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn/scalar_countn.1.adm
@@ -0,0 +1,7 @@
+3
+3
+3
+3
+3
+3
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn_empty/scalar_countn_empty.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn_empty/scalar_countn_empty.1.adm
new file mode 100644
index 0000000..c227083
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn_empty/scalar_countn_empty.1.adm
@@ -0,0 +1 @@
+0
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn_null/scalar_countn_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn_null/scalar_countn_null.1.adm
new file mode 100644
index 0000000..b533851
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_countn_null/scalar_countn_null.1.adm
@@ -0,0 +1,10 @@
+null
+null
+null
+null
+null
+null
+null
+null
+0
+0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cache-residency/cache-residency.002.regexjson b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cache-residency/cache-residency.002.regexjson
index 7c4f553..b3f8c7c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cache-residency/cache-residency.002.regexjson
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cache-residency/cache-residency.002.regexjson
@@ -4,7 +4,7 @@
"*": "*"
},
"type": "application/x-adm",
- "results": [ "{ \"$1\": 17 }" ],
+ "results": [ "R{.*}" ],
"plans": "R{.*}",
"status": "success",
"metrics": {
@@ -14,7 +14,7 @@
"queueWaitTime": "R{.*}",
"resultCount": 1,
"resultSize": "R{.*}",
- "processedObjects": 17,
+ "processedObjects": "R{.*}",
"bufferCacheHitRatio": "100.00%",
"bufferCachePageReadCount": 1
}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.regexadm
index 52981ae..c364a58 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.regexadm
@@ -41,6 +41,7 @@
"compiler\.indexonly" : true,
"compiler\.internal\.sanitycheck" : true,
"compiler\.joinmemory" : 262144,
+ "compiler\.max\.variable\.occurrences\.inlining" : 128,
"compiler.min.groupmemory" : 524288,
"compiler.min.joinmemory" : 524288,
"compiler\.min\.memory\.allocation" : true,
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm
index 0d4967a..9e85426 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm
@@ -41,6 +41,7 @@
"compiler\.indexonly" : true,
"compiler\.internal\.sanitycheck" : false,
"compiler\.joinmemory" : 262144,
+ "compiler\.max\.variable\.occurrences\.inlining" : 128,
"compiler.min.groupmemory" : 524288,
"compiler.min.joinmemory" : 524288,
"compiler\.min\.memory\.allocation" : true,
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm
index 0295ab2..ac00566 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm
@@ -41,6 +41,7 @@
"compiler\.indexonly" : true,
"compiler\.internal\.sanitycheck" : false,
"compiler\.joinmemory" : 262144,
+ "compiler\.max\.variable\.occurrences\.inlining" : 128,
"compiler.min.groupmemory" : 524288,
"compiler.min.joinmemory" : 524288,
"compiler\.min\.memory\.allocation" : true,
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.3.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.3.adm
new file mode 100644
index 0000000..0e8c7ab
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/array_fun/array_slice/array_slice_bracket_notation/array_slice_bracket_notation.3.adm
@@ -0,0 +1 @@
+{ "t1": 2, "t2": [ 2, 3 ], "t3": null, "t4": null, "t5": null, "t6": [ 3 ], "t7": [ 2, 3, 4 ], "t8": [ 2, 3, 4, 5 ], "t9": null, "t10": null, "t11": [ 0, 1, 2, 3, 4, 5 ], "t12": 0, "t13": [ 0, 1, 2, 3 ], "t14": null, "t15": null, "t16": null, "t17": null, "t18": [ [ 1, 2 ] ], "t19": [ [ 1, 2, 3, 4, 5 ] ], "t20": [ [ 0, 1, 2 ] ], "t21": [ [ 0, 1, 2, 3, 4, 5 ] ], "t22": [ [ 0, 1, 2, 3, 4, 5 ] ] }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-cover-data-types/parquet-cover-data-types.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-cover-data-types/parquet-cover-data-types.05.adm
new file mode 100644
index 0000000..8fc863e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-cover-data-types/parquet-cover-data-types.05.adm
@@ -0,0 +1 @@
+{ "name": "Virat", "id": 18, "dateType": date("1988-11-05"), "timeType": time("03:10:00.493"), "boolType": false, "doubleType": 0.75, "datetimeType": datetime("1900-02-01T00:00:00.000") }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-empty-array/parquet-empty-array.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-empty-array/parquet-empty-array.05.adm
new file mode 100644
index 0000000..97de7e9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-empty-array/parquet-empty-array.05.adm
@@ -0,0 +1,2 @@
+{ "id": 2, "name": { "first": [ "power", "star" ] } }
+{ "id": 5, "name": { "first": [ ] } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-simple/parquet-simple.04.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-simple/parquet-simple.04.adm
new file mode 100644
index 0000000..bf567b2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-simple/parquet-simple.04.adm
@@ -0,0 +1 @@
+{ "id": "123" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-tweet/parquet-tweet.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-tweet/parquet-tweet.05.adm
new file mode 100644
index 0000000..5e0df96
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-tweet/parquet-tweet.05.adm
@@ -0,0 +1,2 @@
+{ "coordinates": { "coordinates": [ 1.1 ], "type": "string" }, "created_at": "string", "entities": { "urls": [ { "display_url": "string", "expanded_url": "string", "indices": [ 1 ], "url": "string" } ], "user_mentions": [ { "id": 1, "id_str": "string", "indices": [ 1 ], "name": "string", "screen_name": "string" } ] }, "favorite_count": 1, "favorited": true, "filter_level": "string", "geo": { "coordinates": [ 1.1 ], "type": "string" }, "id": "0000000", "id_str": "string", "in_reply_to_screen_name": "string", "in_reply_to_status_id": 1, "in_reply_to_status_id_str": "string", "in_reply_to_user_id": 1, "in_reply_to_user_id_str": "string", "is_quote_status": true, "lang": "string", "place": { "bounding_box": { "coordinates": [ [ [ 1.1 ] ] ], "type": "string" }, "country": "string", "country_code": "string", "full_name": "string", "id": "string", "name": "string", "place_type": "string", "url": "string" }, "possibly_sensitive": true, "quoted_status": { "created_at": "string", "entities": { "user_mentions": [ { "id": 1, "id_str": "string", "indices": [ 1 ], "name": "string", "screen_name": "string" } ] }, "favorite_count": 1, "favorited": true, "filter_level": "string", "id": 1, "id_str": "string", "in_reply_to_screen_name": "string", "in_reply_to_status_id": 1, "in_reply_to_status_id_str": "string", "in_reply_to_user_id": 1, "in_reply_to_user_id_str": "string", "is_quote_status": true, "lang": "string", "retweet_count": 1, "retweeted": true, "source": "string", "text": "string", "truncated": true, "user": { "contributors_enabled": true, "created_at": "string", "default_profile": true, "default_profile_image": true, "description": "string", "favourites_count": 1, "followers_count": 1, "friends_count": 1, "geo_enabled": true, "id": 1, "id_str": "string", "is_translator": true, "lang": "string", "listed_count": 1, "name": "string", "profile_background_color": "string", "profile_background_image_url": "string", "profile_background_image_url_https": "string", 
"profile_background_tile": true, "profile_banner_url": "string", "profile_image_url": "string", "profile_image_url_https": "string", "profile_link_color": "string", "profile_sidebar_border_color": "string", "profile_sidebar_fill_color": "string", "profile_text_color": "string", "profile_use_background_image": true, "protected": true, "screen_name": "string", "statuses_count": 1, "verified": true } }, "quoted_status_id": 1, "quoted_status_id_str": "string", "retweet_count": 1, "retweeted": true, "source": "string", "text": "string", "timestamp_ms": "string", "truncated": true, "user": { "contributors_enabled": true, "created_at": "string", "default_profile": true, "default_profile_image": true, "description": "string", "favourites_count": 1, "followers_count": 1, "friends_count": 1, "geo_enabled": true, "id": 1, "id_str": "string", "is_translator": true, "lang": "string", "listed_count": 1, "location": "string", "name": "string", "profile_background_color": "string", "profile_background_image_url": "string", "profile_background_image_url_https": "string", "profile_background_tile": true, "profile_banner_url": "string", "profile_image_url": "string", "profile_image_url_https": "string", "profile_link_color": "string", "profile_sidebar_border_color": "string", "profile_sidebar_fill_color": "string", "profile_text_color": "string", "profile_use_background_image": true, "protected": true, "screen_name": "string", "statuses_count": 1, "time_zone": "string", "url": "string", "utc_offset": 1, "verified": true } }
+{ "coordinates": { "coordinates": [ 1.1 ], "type": "string" }, "created_at": "string", "favorite_count": 1, "favorited": true, "filter_level": "string", "geo": { "coordinates": [ 1.1 ], "type": "string" }, "id": "11111111111111111111", "id_str": "string", "in_reply_to_screen_name": "string", "in_reply_to_status_id": 1, "in_reply_to_status_id_str": "string", "in_reply_to_user_id": 1, "in_reply_to_user_id_str": "string", "is_quote_status": true, "lang": "string", "place": { "bounding_box": { "coordinates": [ [ [ 1.1 ] ] ], "type": "string" }, "country": "string", "country_code": "string", "full_name": "string", "id": "string", "name": "string", "place_type": "string", "url": "string" }, "possibly_sensitive": true, "quoted_status": { "created_at": "string", "entities": { "user_mentions": [ { "id": 1, "id_str": "string", "indices": [ 1 ], "name": "string", "screen_name": "string" } ] }, "favorite_count": 1, "favorited": true, "filter_level": "string", "id": 1, "id_str": "string", "in_reply_to_screen_name": "string", "in_reply_to_status_id": 1, "in_reply_to_status_id_str": "string", "in_reply_to_user_id": 1, "in_reply_to_user_id_str": "string", "is_quote_status": true, "lang": "string", "retweet_count": 1, "retweeted": true, "source": "string", "text": "string", "truncated": true, "user": { "contributors_enabled": true, "created_at": "string", "default_profile": true, "default_profile_image": true, "description": "string", "favourites_count": 1, "followers_count": 1, "friends_count": 1, "geo_enabled": true, "id": 1, "id_str": "string", "is_translator": true, "lang": "string", "listed_count": 1, "name": "string", "profile_background_color": "string", "profile_background_image_url": "string", "profile_background_image_url_https": "string", "profile_background_tile": true, "profile_banner_url": "string", "profile_image_url": "string", "profile_image_url_https": "string", "profile_link_color": "string", "profile_sidebar_border_color": "string", 
"profile_sidebar_fill_color": "string", "profile_text_color": "string", "profile_use_background_image": true, "protected": true, "screen_name": "string", "statuses_count": 1, "verified": true } }, "quoted_status_id": 1, "quoted_status_id_str": "string", "retweet_count": 1, "retweeted": true, "source": "string", "text": "string", "timestamp_ms": "string", "truncated": true, "user": { "contributors_enabled": true, "created_at": "string", "default_profile": true, "default_profile_image": true, "description": "string", "favourites_count": 1, "followers_count": 1, "friends_count": 1, "geo_enabled": true, "id": 1, "id_str": "string", "is_translator": true, "lang": "string", "listed_count": 1, "location": "string", "name": "string", "profile_background_color": "string", "profile_background_image_url": "string", "profile_background_image_url_https": "string", "profile_background_tile": true, "profile_banner_url": "string", "profile_image_url": "string", "profile_image_url_https": "string", "profile_link_color": "string", "profile_sidebar_border_color": "string", "profile_sidebar_fill_color": "string", "profile_text_color": "string", "profile_use_background_image": true, "protected": true, "screen_name": "string", "statuses_count": 1, "time_zone": "string", "url": "string", "utc_offset": 1, "verified": true } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-utf8/parquet-utf8.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-utf8/parquet-utf8.05.adm
new file mode 100644
index 0000000..c60145d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/copy-to/parquet-utf8/parquet-utf8.05.adm
@@ -0,0 +1,8 @@
+{ "id": 1, "name": "John" }
+{ "id": 2, "name": "Abel" }
+{ "id": 3, "name": "Sandy" }
+{ "id": 4, "name": "Alex" }
+{ "id": 5, "name": "Mike" }
+{ "id": 6, "name": "Tom" }
+{ "comment": "😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا", "id": 7, "name": "Jerry" }
+{ "comment": "😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. 
حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. 
حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. 
حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. 
حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا😢😢💉💉 = 𩸽 😢😢💉💉. Coffee ☕‼️😃. حسنا", "id": 8, "name": "William" }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.03.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.03.adm
index 2a16ab1..06c68c8 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.03.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.03.adm
@@ -1,14 +1,13 @@
-{ "Geometries": { "id": 123, "myGeometry": {"type":"Point","coordinates":[-118.4,33.93],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 124, "myGeometry": {"type":"Polygon","coordinates":[[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 126, "myGeometry": {"type":"LineString","coordinates":[[-69.1991349,-12.6006222],[-69.199136,-12.599842],[-69.1982979,-12.5998268],[-69.1982598,-12.599869],[-69.1982188,-12.5998698],[-69.19817,-12.5998707],[-69.198125,-12.5998218],[-69.1973024,-12.5998133],[-69.1972972,-12.6003109],[-69.197394,-12.6003514],[-69.1973906,-12.6009231],[-69.1975115,-12.601026],[-69.1975081,-12.6010968]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 127, "myGeometry": {"type":"MultiPoint","coordinates":[[10,40],[40,30],[20,20],[30,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 128, "myGeometry": {"type":"MultiLineString","coordinates":[[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]],"crs":null} } }
-{ "Geometries": { "id": 129, "myGeometry": {"type":"MultiPolygon","coordinates":[[[[40,40],[20,45],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]],"crs":null} } }
-{ "Geometries": { "id": 130, "myGeometry": {"type":"Point","coordinates":[-71.1043443253471,42.3150676015829],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 131, "myGeometry": {"type":"Point","coordinates":[1,2,3],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 132, "myGeometry": {"type":"Point","coordinates":[1,2,3,4],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 133, "myGeometry": {"type":"Polygon","coordinates":[[[743238,2967416],[743265.625,2967416],[743265,2967450],[743238,2967450],[743238,2967416]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 134, "myGeometry": {"type":"LineString","coordinates":[[-113.98,39.198],[-113.981,39.195]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 135, "myGeometry": {"type":"LineString","coordinates":[[1,2],[4,5],[7,8]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 136, "myGeometry": {"type":"Polygon","coordinates":[[[10,10],[11,10],[11,11],[10,11],[10,10]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 137, "myGeometry": {"type":"Polygon","coordinates":[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
+{ "Geometries": { "id": 123, "myGeometry": {"type":"Point","coordinates":[-118.4,33.93]} } }
+{ "Geometries": { "id": 124, "myGeometry": {"type":"Polygon","coordinates":[[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]]} } }
+{ "Geometries": { "id": 126, "myGeometry": {"type":"LineString","coordinates":[[-69.1991349,-12.6006222],[-69.199136,-12.599842],[-69.1982979,-12.5998268],[-69.1982598,-12.599869],[-69.1982188,-12.5998698],[-69.19817,-12.5998707],[-69.198125,-12.5998218],[-69.1973024,-12.5998133],[-69.1972972,-12.6003109],[-69.197394,-12.6003514],[-69.1973906,-12.6009231],[-69.1975115,-12.601026],[-69.1975081,-12.6010968]]} } }
+{ "Geometries": { "id": 127, "myGeometry": {"type":"MultiPoint","coordinates":[[10,40],[40,30],[20,20],[30,10]]} } }
+{ "Geometries": { "id": 128, "myGeometry": {"type":"MultiLineString","coordinates":[[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]} } }
+{ "Geometries": { "id": 129, "myGeometry": {"type":"MultiPolygon","coordinates":[[[[40,40],[20,45],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]]} } }
+{ "Geometries": { "id": 130, "myGeometry": {"type":"Point","coordinates":[-71.1043443253471,42.3150676015829]} } }
+{ "Geometries": { "id": 131, "myGeometry": {"type":"Point","coordinates":[1,2,3]} } }
+{ "Geometries": { "id": 133, "myGeometry": {"type":"Polygon","coordinates":[[[743238,2967416],[743238,2967450],[743265,2967450],[743265.625,2967416],[743238,2967416]]]} } }
+{ "Geometries": { "id": 134, "myGeometry": {"type":"LineString","coordinates":[[-113.98,39.198],[-113.981,39.195]]} } }
+{ "Geometries": { "id": 135, "myGeometry": {"type":"LineString","coordinates":[[1,2],[4,5],[7,8]]} } }
+{ "Geometries": { "id": 136, "myGeometry": {"type":"Polygon","coordinates":[[[10,10],[10,11],[11,11],[11,10],[10,10]]]} } }
+{ "Geometries": { "id": 137, "myGeometry": {"type":"Polygon","coordinates":[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]} } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.04.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.04.adm
index 6cba06a..688fcf1 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.04.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.04.adm
@@ -1,14 +1,13 @@
-{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 0, "NPoints": 1, "XMax": -118.4, "XMin": -118.4, "YMax": 33.93, "YMin": 33.93, "Binary": hex("01010000009A99999999995DC0D7A3703D0AF74040"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[-118.4,33.93],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POINT (-118.4 33.93)" }
-{ "Type": "Polygon", "Area": 1.3755215000294761E-8, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 6, "XMax": 8.760178, "XMin": 8.7599721, "YMax": 49.7103478, "YMin": 49.7102133, "Binary": hex("01030000000100000007000000B1BE26101B852140ED20C033EBDA4840C11DA8531E8521407694394CEADA4840BAF8DB9E20852140D5F89683EADA484098EF2AB5288521409557F844E8DA48400247020D3685214041F74086E9DA484014F131B32A8521408DE43CADECDA4840B1BE26101B852140ED20C033EBDA4840"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POLYGON ((8.7599721 49.7103028, 8.759997 49.7102752, 8.7600145 49.7102818, 8.7600762 49.7102133, 8.760178 49.7102516, 8.7600914 49.7103478, 8.7599721 49.7103028))" }
-{ "Type": "LineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 13, "XMax": -69.1972972, "XMin": -69.199136, "YMax": -12.5998133, "YMin": -12.6010968, "Binary": hex("01020000000D00000081BF4EA0BE4C51C0CE80C4C0843329C033DDEBA4BE4C51C0D388997D1E3329C064D7ACE9B04C51C05787927F1C3329C02347DF49B04C51C0B9C49107223329C0F4DCE79DAF4C51C054B76922223329C0184339D1AE4C51C042A89C40223329C048E17A14AE4C51C010DBCCD71B3329C007DB3E9AA04C51C0636996BA1A3329C0E9656F84A04C51C05ADD45F35B3329C04E9B711AA24C51C033373A42613329C06CE22E0CA24C51C066D24B31AC3329C05F454607A44C51C065170CAEB93329C07C8C03F9A34C51C0E475B3F5C23329C0"), "GeoJSON": "{\"type\":\"LineString\",\"coordinates\":[[-69.1991349,-12.6006222],[-69.199136,-12.599842],[-69.1982979,-12.5998268],[-69.1982598,-12.599869],[-69.1982188,-12.5998698],[-69.19817,-12.5998707],[-69.198125,-12.5998218],[-69.1973024,-12.5998133],[-69.1972972,-12.6003109],[-69.197394,-12.6003514],[-69.1973906,-12.6009231],[-69.1975115,-12.601026],[-69.1975081,-12.6010968]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "LINESTRING (-69.1991349 -12.6006222, -69.199136 -12.599842, -69.1982979 -12.5998268, -69.1982598 -12.599869, -69.1982188 -12.5998698, -69.19817 -12.5998707, -69.198125 -12.5998218, -69.1973024 -12.5998133, -69.1972972 -12.6003109, -69.197394 -12.6003514, -69.1973906 -12.6009231, -69.1975115 -12.601026, -69.1975081 -12.6010968)" }
-{ "Type": "MultiPoint", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 0, "NPoints": 4, "XMax": 40.0, "XMin": 10.0, "YMax": 40.0, "YMin": 10.0, "Binary": hex("010400000004000000010100000000000000000024400000000000004440010100000000000000000044400000000000003E4001010000000000000000003440000000000000344001010000000000000000003E400000000000002440"), "GeoJSON": "{\"type\":\"MultiPoint\",\"coordinates\":[[10,40],[40,30],[20,20],[30,10]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))" }
-{ "Type": "MultiLineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 7, "XMax": 40.0, "XMin": 10.0, "YMax": 40.0, "YMin": 10.0, "Binary": hex("010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003E400000000000003E40000000000000444000000000000034400000000000003E400000000000002440"), "GeoJSON": "{\"type\":\"MultiLineString\",\"coordinates\":[[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]],\"crs\":null}", "WKT": "MULTILINESTRING ((10 10, 20 20, 10 40), (40 40, 30 30, 40 20, 30 10))" }
-{ "Type": "MultiPolygon", "Area": 712.5, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 11, "XMax": 45.0, "XMin": 10.0, "YMax": 45.0, "YMin": 5.0, "Binary": hex("01060000000200000001030000000100000004000000000000000000444000000000000044400000000000003440000000000080464000000000008046400000000000003E4000000000000044400000000000004440010300000002000000060000000000000000003440000000000080414000000000000024400000000000003E40000000000000244000000000000024400000000000003E4000000000000014400000000000804640000000000000344000000000000034400000000000804140040000000000000000003E40000000000000344000000000000034400000000000002E40000000000000344000000000000039400000000000003E400000000000003440"), "GeoJSON": "{\"type\":\"MultiPolygon\",\"coordinates\":[[[[40,40],[20,45],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]],\"crs\":null}", "WKT": "MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)), ((20 35, 10 30, 10 10, 30 5, 45 20, 20 35), (30 20, 20 15, 20 25, 30 20)))" }
-{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 0, "NPoints": 1, "XMax": -71.1043443253471, "XMin": -71.1043443253471, "YMax": 42.3150676015829, "YMin": 42.3150676015829, "Binary": hex("0101000000E538D293ADC651C0F3699A2254284540"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[-71.1043443253471,42.3150676015829],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POINT (-71.1043443253471 42.3150676015829)" }
-{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 3, "Dimension": 0, "NPoints": 1, "XMax": 1.0, "XMin": 1.0, "YMax": 2.0, "YMin": 2.0, "Binary": hex("01E9030000000000000000F03F00000000000000400000000000000840"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[1,2,3],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POINT Z (1 2 3)" }
-{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 4, "Dimension": 0, "NPoints": 1, "XMax": 1.0, "XMin": 1.0, "YMax": 2.0, "YMin": 2.0, "Binary": hex("01B90B0000000000000000F03F000000000000004000000000000008400000000000001040"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[1,2,3,4],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POINT ZM (1 2 3 4)" }
-{ "Type": "Polygon", "Area": 928.625, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 4, "XMax": 743265.625, "XMin": 743238.0, "YMax": 2967450.0, "YMin": 2967416.0, "Binary": hex("01030000000100000005000000000000008CAE264100000000BCA3464100000040C3AE264100000000BCA3464100000000C2AE264100000000CDA34641000000008CAE264100000000CDA34641000000008CAE264100000000BCA34641"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[743238,2967416],[743265.625,2967416],[743265,2967450],[743238,2967450],[743238,2967416]]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POLYGON ((743238 2967416, 743265.625 2967416, 743265 2967450, 743238 2967450, 743238 2967416))" }
-{ "Type": "LineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 2, "XMax": -113.98, "XMin": -113.981, "YMax": 39.198, "YMin": 39.195, "Binary": hex("0102000000020000001F85EB51B87E5CC0D34D621058994340105839B4C87E5CC0295C8FC2F5984340"), "GeoJSON": "{\"type\":\"LineString\",\"coordinates\":[[-113.98,39.198],[-113.981,39.195]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "LINESTRING (-113.98 39.198, -113.981 39.195)" }
-{ "Type": "LineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 3, "XMax": 7.0, "XMin": 1.0, "YMax": 8.0, "YMin": 2.0, "Binary": hex("010200000003000000000000000000F03F0000000000000040000000000000104000000000000014400000000000001C400000000000002040"), "GeoJSON": "{\"type\":\"LineString\",\"coordinates\":[[1,2],[4,5],[7,8]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "LINESTRING (1 2, 4 5, 7 8)" }
-{ "Type": "Polygon", "Area": 1.0, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 4, "XMax": 11.0, "XMin": 10.0, "YMax": 11.0, "YMin": 10.0, "Binary": hex("010300000001000000050000000000000000002440000000000000244000000000000026400000000000002440000000000000264000000000000026400000000000002440000000000000264000000000000024400000000000002440"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[10,10],[11,10],[11,11],[10,11],[10,10]]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POLYGON ((10 10, 11 10, 11 11, 10 11, 10 10))" }
-{ "Type": "Polygon", "Area": 675.0, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 7, "XMax": 45.0, "XMin": 10.0, "YMax": 45.0, "YMin": 10.0, "Binary": hex("0103000000020000000500000000000000008041400000000000002440000000000080464000000000008046400000000000002E40000000000000444000000000000024400000000000003440000000000080414000000000000024400400000000000000000034400000000000003E40000000000080414000000000008041400000000000003E40000000000000344000000000000034400000000000003E40"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]],\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}", "WKT": "POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))" }
+{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 0, "NPoints": 1, "XMax": -118.4, "XMin": -118.4, "YMax": 33.93, "YMin": 33.93, "Binary": hex("01010000009A99999999995DC0D7A3703D0AF74040"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[-118.4,33.93]}", "WKT": "POINT (-118.4 33.93)" }
+{ "Type": "Polygon", "Area": 1.3755215000294761E-8, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 6, "XMax": 8.760178, "XMin": 8.7599721, "YMax": 49.7103478, "YMin": 49.7102133, "Binary": hex("01030000000100000007000000B1BE26101B852140ED20C033EBDA4840C11DA8531E8521407694394CEADA4840BAF8DB9E20852140D5F89683EADA484098EF2AB5288521409557F844E8DA48400247020D3685214041F74086E9DA484014F131B32A8521408DE43CADECDA4840B1BE26101B852140ED20C033EBDA4840"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]]}", "WKT": "POLYGON ((8.7599721 49.7103028, 8.759997 49.7102752, 8.7600145 49.7102818, 8.7600762 49.7102133, 8.760178 49.7102516, 8.7600914 49.7103478, 8.7599721 49.7103028))" }
+{ "Type": "LineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 13, "XMax": -69.1972972, "XMin": -69.199136, "YMax": -12.5998133, "YMin": -12.6010968, "Binary": hex("01020000000D00000081BF4EA0BE4C51C0CE80C4C0843329C033DDEBA4BE4C51C0D388997D1E3329C064D7ACE9B04C51C05787927F1C3329C02347DF49B04C51C0B9C49107223329C0F4DCE79DAF4C51C054B76922223329C0184339D1AE4C51C042A89C40223329C048E17A14AE4C51C010DBCCD71B3329C007DB3E9AA04C51C0636996BA1A3329C0E9656F84A04C51C05ADD45F35B3329C04E9B711AA24C51C033373A42613329C06CE22E0CA24C51C066D24B31AC3329C05F454607A44C51C065170CAEB93329C07C8C03F9A34C51C0E475B3F5C23329C0"), "GeoJSON": "{\"type\":\"LineString\",\"coordinates\":[[-69.1991349,-12.6006222],[-69.199136,-12.599842],[-69.1982979,-12.5998268],[-69.1982598,-12.599869],[-69.1982188,-12.5998698],[-69.19817,-12.5998707],[-69.198125,-12.5998218],[-69.1973024,-12.5998133],[-69.1972972,-12.6003109],[-69.197394,-12.6003514],[-69.1973906,-12.6009231],[-69.1975115,-12.601026],[-69.1975081,-12.6010968]]}", "WKT": "LINESTRING (-69.1991349 -12.6006222, -69.199136 -12.599842, -69.1982979 -12.5998268, -69.1982598 -12.599869, -69.1982188 -12.5998698, -69.19817 -12.5998707, -69.198125 -12.5998218, -69.1973024 -12.5998133, -69.1972972 -12.6003109, -69.197394 -12.6003514, -69.1973906 -12.6009231, -69.1975115 -12.601026, -69.1975081 -12.6010968)" }
+{ "Type": "MultiPoint", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 0, "NPoints": 4, "XMax": 40.0, "XMin": 10.0, "YMax": 40.0, "YMin": 10.0, "Binary": hex("010400000004000000010100000000000000000024400000000000004440010100000000000000000044400000000000003E4001010000000000000000003440000000000000344001010000000000000000003E400000000000002440"), "GeoJSON": "{\"type\":\"MultiPoint\",\"coordinates\":[[10,40],[40,30],[20,20],[30,10]]}", "WKT": "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))" }
+{ "Type": "MultiLineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 7, "XMax": 40.0, "XMin": 10.0, "YMax": 40.0, "YMin": 10.0, "Binary": hex("010500000002000000010200000003000000000000000000244000000000000024400000000000003440000000000000344000000000000024400000000000004440010200000004000000000000000000444000000000000044400000000000003E400000000000003E40000000000000444000000000000034400000000000003E400000000000002440"), "GeoJSON": "{\"type\":\"MultiLineString\",\"coordinates\":[[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]}", "WKT": "MULTILINESTRING ((10 10, 20 20, 10 40), (40 40, 30 30, 40 20, 30 10))" }
+{ "Type": "MultiPolygon", "Area": 712.5, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 11, "XMax": 45.0, "XMin": 10.0, "YMax": 45.0, "YMin": 5.0, "Binary": hex("01060000000200000001030000000100000004000000000000000000444000000000000044400000000000003440000000000080464000000000008046400000000000003E4000000000000044400000000000004440010300000002000000060000000000000000003440000000000080414000000000000024400000000000003E40000000000000244000000000000024400000000000003E4000000000000014400000000000804640000000000000344000000000000034400000000000804140040000000000000000003E40000000000000344000000000000034400000000000002E40000000000000344000000000000039400000000000003E400000000000003440"), "GeoJSON": "{\"type\":\"MultiPolygon\",\"coordinates\":[[[[40,40],[20,45],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]]}", "WKT": "MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)), ((20 35, 10 30, 10 10, 30 5, 45 20, 20 35), (30 20, 20 15, 20 25, 30 20)))" }
+{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 0, "NPoints": 1, "XMax": -71.1043443253471, "XMin": -71.1043443253471, "YMax": 42.3150676015829, "YMin": 42.3150676015829, "Binary": hex("0101000000E538D293ADC651C0F3699A2254284540"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[-71.1043443253471,42.3150676015829]}", "WKT": "POINT (-71.1043443253471 42.3150676015829)" }
+{ "Type": "Point", "Area": 0.0, "Coordinate dimension": 3, "Dimension": 0, "NPoints": 1, "XMax": 1.0, "XMin": 1.0, "YMax": 2.0, "YMin": 2.0, "Binary": hex("0101000080000000000000F03F00000000000000400000000000000840"), "GeoJSON": "{\"type\":\"Point\",\"coordinates\":[1,2,3]}", "WKT": "POINT Z(1 2 3)" }
+{ "Type": "Polygon", "Area": 928.625, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 4, "XMax": 743265.625, "XMin": 743238.0, "YMax": 2967450.0, "YMin": 2967416.0, "Binary": hex("01030000000100000005000000000000008CAE264100000000BCA34641000000008CAE264100000000CDA3464100000000C2AE264100000000CDA3464100000040C3AE264100000000BCA34641000000008CAE264100000000BCA34641"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[743238,2967416],[743238,2967450],[743265,2967450],[743265.625,2967416],[743238,2967416]]]}", "WKT": "POLYGON ((743238 2967416, 743238 2967450, 743265 2967450, 743265.625 2967416, 743238 2967416))" }
+{ "Type": "LineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 2, "XMax": -113.98, "XMin": -113.981, "YMax": 39.198, "YMin": 39.195, "Binary": hex("0102000000020000001F85EB51B87E5CC0D34D621058994340105839B4C87E5CC0295C8FC2F5984340"), "GeoJSON": "{\"type\":\"LineString\",\"coordinates\":[[-113.98,39.198],[-113.981,39.195]]}", "WKT": "LINESTRING (-113.98 39.198, -113.981 39.195)" }
+{ "Type": "LineString", "Area": 0.0, "Coordinate dimension": 2, "Dimension": 1, "NPoints": 3, "XMax": 7.0, "XMin": 1.0, "YMax": 8.0, "YMin": 2.0, "Binary": hex("010200000003000000000000000000F03F0000000000000040000000000000104000000000000014400000000000001C400000000000002040"), "GeoJSON": "{\"type\":\"LineString\",\"coordinates\":[[1,2],[4,5],[7,8]]}", "WKT": "LINESTRING (1 2, 4 5, 7 8)" }
+{ "Type": "Polygon", "Area": 1.0, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 4, "XMax": 11.0, "XMin": 10.0, "YMax": 11.0, "YMin": 10.0, "Binary": hex("010300000001000000050000000000000000002440000000000000244000000000000024400000000000002640000000000000264000000000000026400000000000002640000000000000244000000000000024400000000000002440"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[10,10],[10,11],[11,11],[11,10],[10,10]]]}", "WKT": "POLYGON ((10 10, 10 11, 11 11, 11 10, 10 10))" }
+{ "Type": "Polygon", "Area": 675.0, "Coordinate dimension": 2, "Dimension": 2, "NPoints": 7, "XMax": 45.0, "XMin": 10.0, "YMax": 45.0, "YMin": 10.0, "Binary": hex("0103000000020000000500000000000000008041400000000000002440000000000080464000000000008046400000000000002E40000000000000444000000000000024400000000000003440000000000080414000000000000024400400000000000000000034400000000000003E40000000000080414000000000008041400000000000003E40000000000000344000000000000034400000000000003E40"), "GeoJSON": "{\"type\":\"Polygon\",\"coordinates\":[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]}", "WKT": "POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))" }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.05.adm
index 0815bfa..5ea6fd5 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.05.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.05.adm
@@ -1,4 +1,3 @@
-{ "X": -118.4, "Y": 33.93, "Z": 0.0, "M": NaN }
-{ "X": -71.1043443253471, "Y": 42.3150676015829, "Z": 0.0, "M": NaN }
-{ "X": 1.0, "Y": 2.0, "Z": 3.0, "M": NaN }
-{ "X": 1.0, "Y": 2.0, "Z": 3.0, "M": 4.0 }
\ No newline at end of file
+{ "X": -118.4, "Y": 33.93, "Z": NaN, "M": NaN }
+{ "X": -71.1043443253471, "Y": 42.3150676015829, "Z": NaN, "M": NaN }
+{ "X": 1.0, "Y": 2.0, "Z": 3.0, "M": NaN }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.06.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.06.adm
index 044e31a..39ced5e 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.06.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.06.adm
@@ -1,4 +1,4 @@
-{ "NumInteriorRings": 0, "ExteriorRing": {"type":"LineString","coordinates":[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "NumInteriorRings": 0, "ExteriorRing": {"type":"LineString","coordinates":[[743238,2967416],[743265.625,2967416],[743265,2967450],[743238,2967450],[743238,2967416]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "NumInteriorRings": 0, "ExteriorRing": {"type":"LineString","coordinates":[[10,10],[11,10],[11,11],[10,11],[10,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "NumInteriorRings": 1, "ExteriorRing": {"type":"LineString","coordinates":[[35,10],[45,45],[15,40],[10,20],[35,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
\ No newline at end of file
+{ "NumInteriorRings": 0, "ExteriorRing": {"type":"LineString","coordinates":[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]} }
+{ "NumInteriorRings": 0, "ExteriorRing": {"type":"LineString","coordinates":[[743238,2967416],[743238,2967450],[743265,2967450],[743265.625,2967416],[743238,2967416]]} }
+{ "NumInteriorRings": 0, "ExteriorRing": {"type":"LineString","coordinates":[[10,10],[10,11],[11,11],[11,10],[10,10]]} }
+{ "NumInteriorRings": 1, "ExteriorRing": {"type":"LineString","coordinates":[[35,10],[45,45],[15,40],[10,20],[35,10]]} }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.07.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.07.adm
index 7967722..3a026d5 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.07.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.07.adm
@@ -1,4 +1,4 @@
-{ "Length": 0.004058119099397876, "Boundary": {"type":"MultiPoint","coordinates":[[-69.1991349,-12.6006222],[-69.1975081,-12.6010968]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Length": 78.9292222699217, "Boundary": {"type":"MultiPoint","coordinates":[[10,10],[10,40],[40,40],[30,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Length": 0.0031622776601655037, "Boundary": {"type":"MultiPoint","coordinates":[[-113.98,39.198],[-113.981,39.195]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Length": 8.48528137423857, "Boundary": {"type":"MultiPoint","coordinates":[[1,2],[7,8]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
+{ "Length": 0.004058119099397876, "Boundary": {"type":"MultiPoint","coordinates":[[-69.1991349,-12.6006222],[-69.1975081,-12.6010968]]} }
+{ "Length": 78.9292222699217, "Boundary": {"type":"MultiPoint","coordinates":[[10,10],[10,40],[30,10],[40,40]]} }
+{ "Length": 0.0031622776601655037, "Boundary": {"type":"MultiPoint","coordinates":[[-113.98,39.198],[-113.981,39.195]]} }
+{ "Length": 8.48528137423857, "Boundary": {"type":"MultiPoint","coordinates":[[1,2],[7,8]]} }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.08.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.08.adm
index c0d2341..1987970 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.08.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.08.adm
@@ -1,3 +1,3 @@
-{ "PointN": {"type":"Point","coordinates":[-69.199136,-12.599842],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "StartPoint": {"type":"Point","coordinates":[-69.1991349,-12.6006222],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Envelope": {"type":"Polygon","coordinates":[[[-69.199136,-12.6010968],[-69.1972972,-12.6010968],[-69.1972972,-12.5998133],[-69.199136,-12.5998133],[-69.199136,-12.6010968]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "PointN": {"type":"Point","coordinates":[-69.199136,-12.599842],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "StartPoint": {"type":"Point","coordinates":[-113.98,39.198],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Envelope": {"type":"Polygon","coordinates":[[[-113.981,39.195],[-113.98,39.195],[-113.98,39.198],[-113.981,39.198],[-113.981,39.195]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "PointN": {"type":"Point","coordinates":[-69.199136,-12.599842],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "StartPoint": {"type":"Point","coordinates":[1,2],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Envelope": {"type":"Polygon","coordinates":[[[1,2],[7,2],[7,8],[1,8],[1,2]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
\ No newline at end of file
+{ "PointN": {"type":"Point","coordinates":[-69.199136,-12.599842]}, "StartPoint": {"type":"Point","coordinates":[-69.1991349,-12.6006222]}, "Envelope": {"type":"Polygon","coordinates":[[[-69.199136,-12.6010968],[-69.199136,-12.5998133],[-69.1972972,-12.5998133],[-69.1972972,-12.6010968],[-69.199136,-12.6010968]]]} }
+{ "PointN": {"type":"Point","coordinates":[-69.199136,-12.599842]}, "StartPoint": {"type":"Point","coordinates":[-113.98,39.198]}, "Envelope": {"type":"Polygon","coordinates":[[[-113.981,39.195],[-113.981,39.198],[-113.98,39.198],[-113.98,39.195],[-113.981,39.195]]]} }
+{ "PointN": {"type":"Point","coordinates":[-69.199136,-12.599842]}, "StartPoint": {"type":"Point","coordinates":[1,2]}, "Envelope": {"type":"Polygon","coordinates":[[[1,2],[1,8],[7,8],[7,2],[1,2]]]} }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.09.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.09.adm
index 893f2dc..8a42de2 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.09.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/datatype/result.09.adm
@@ -7,7 +7,6 @@
{ "IsClosed": true, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
{ "IsClosed": true, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
{ "IsClosed": true, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
-{ "IsClosed": true, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
{ "IsClosed": false, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
{ "IsClosed": false, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
{ "IsClosed": true, "IsCollection": false, "IsEmpty": false, "IsSimple": true }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.20.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.20.adm
index ea141c9..bebded7 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.20.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.20.adm
@@ -1,14 +1,13 @@
-{ "Geometries": { "id": 123, "myGeometry": {"type":"Point","coordinates":[-118.4,33.93],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 124, "myGeometry": {"type":"Polygon","coordinates":[[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 126, "myGeometry": {"type":"LineString","coordinates":[[-69.1991349,-12.6006222],[-69.199136,-12.599842],[-69.1982979,-12.5998268],[-69.1982598,-12.599869],[-69.1982188,-12.5998698],[-69.19817,-12.5998707],[-69.198125,-12.5998218],[-69.1973024,-12.5998133],[-69.1972972,-12.6003109],[-69.197394,-12.6003514],[-69.1973906,-12.6009231],[-69.1975115,-12.601026],[-69.1975081,-12.6010968]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 127, "myGeometry": {"type":"MultiPoint","coordinates":[[10,40],[40,30],[20,20],[30,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 128, "myGeometry": {"type":"MultiLineString","coordinates":[[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]],"crs":null} } }
-{ "Geometries": { "id": 129, "myGeometry": {"type":"MultiPolygon","coordinates":[[[[40,40],[20,45],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]],"crs":null} } }
-{ "Geometries": { "id": 130, "myGeometry": {"type":"Point","coordinates":[-71.1043443253471,42.3150676015829],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 131, "myGeometry": {"type":"Point","coordinates":[1,2,3],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 132, "myGeometry": {"type":"Point","coordinates":[1,2,3,4],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 133, "myGeometry": {"type":"Polygon","coordinates":[[[743238,2967416],[743265.625,2967416],[743265,2967450],[743238,2967450],[743238,2967416]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 134, "myGeometry": {"type":"LineString","coordinates":[[-113.98,39.198],[-113.981,39.195]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 135, "myGeometry": {"type":"LineString","coordinates":[[1,2],[4,5],[7,8]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 136, "myGeometry": {"type":"Polygon","coordinates":[[[10,10],[11,10],[11,11],[10,11],[10,10]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
-{ "Geometries": { "id": 137, "myGeometry": {"type":"Polygon","coordinates":[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} } }
\ No newline at end of file
+{ "Geometries": { "id": 123, "myGeometry": {"type":"Point","coordinates":[-118.4,33.93]} } }
+{ "Geometries": { "id": 124, "myGeometry": {"type":"Polygon","coordinates":[[[8.7599721,49.7103028],[8.759997,49.7102752],[8.7600145,49.7102818],[8.7600762,49.7102133],[8.760178,49.7102516],[8.7600914,49.7103478],[8.7599721,49.7103028]]]} } }
+{ "Geometries": { "id": 126, "myGeometry": {"type":"LineString","coordinates":[[-69.1991349,-12.6006222],[-69.199136,-12.599842],[-69.1982979,-12.5998268],[-69.1982598,-12.599869],[-69.1982188,-12.5998698],[-69.19817,-12.5998707],[-69.198125,-12.5998218],[-69.1973024,-12.5998133],[-69.1972972,-12.6003109],[-69.197394,-12.6003514],[-69.1973906,-12.6009231],[-69.1975115,-12.601026],[-69.1975081,-12.6010968]]} } }
+{ "Geometries": { "id": 127, "myGeometry": {"type":"MultiPoint","coordinates":[[10,40],[40,30],[20,20],[30,10]]} } }
+{ "Geometries": { "id": 128, "myGeometry": {"type":"MultiLineString","coordinates":[[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]} } }
+{ "Geometries": { "id": 129, "myGeometry": {"type":"MultiPolygon","coordinates":[[[[40,40],[20,45],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]]} } }
+{ "Geometries": { "id": 130, "myGeometry": {"type":"Point","coordinates":[-71.1043443253471,42.3150676015829]} } }
+{ "Geometries": { "id": 131, "myGeometry": {"type":"Point","coordinates":[1,2,3]} } }
+{ "Geometries": { "id": 133, "myGeometry": {"type":"Polygon","coordinates":[[[743238,2967416],[743238,2967450],[743265,2967450],[743265.625,2967416],[743238,2967416]]]} } }
+{ "Geometries": { "id": 134, "myGeometry": {"type":"LineString","coordinates":[[-113.98,39.198],[-113.981,39.195]]} } }
+{ "Geometries": { "id": 135, "myGeometry": {"type":"LineString","coordinates":[[1,2],[4,5],[7,8]]} } }
+{ "Geometries": { "id": 136, "myGeometry": {"type":"Polygon","coordinates":[[[10,10],[10,11],[11,11],[11,10],[10,10]]]} } }
+{ "Geometries": { "id": 137, "myGeometry": {"type":"Polygon","coordinates":[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]} } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.21.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.21.adm
index 2ceb6e0..7628d62 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.21.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/index/result.21.adm
@@ -1,3 +1,2 @@
131
-132
135
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.10.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.10.adm
index a1e16dd..bfa005e 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.10.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.10.adm
@@ -1 +1 @@
-{"type":"Polygon","coordinates":[[[-7,4.2],[-7.1,5],[-7.1,4.3],[-7,4.2]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}
\ No newline at end of file
+{"type":"Polygon","coordinates":[[[-7,4.2],[-7.1,5],[-7.1,4.3],[-7,4.2]]]}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.15.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.15.adm
new file mode 100644
index 0000000..9243a76
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/single-method/result.15.adm
@@ -0,0 +1,14 @@
+NaN
+NaN
+0.0
+0.0
+3.0
+1.0
+-1.0
+-3.0
+1.0
+-1.0
+2.0
+0.0
+0.0
+-1.0
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.04.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.04.adm
index e353336..57d5885 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.04.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.04.adm
@@ -1,10 +1,10 @@
-{ "Union": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"Point","coordinates":[0,2],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Union": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[0,2],[1,2],[2,2]],[[2,2],[3,3]],[[3,3],[4,2],[5,2]],[[3,3],[4,4],[5,5],[6,6]]],"crs":null}, "Intersection": {"type":"LineString","coordinates":[[2,2],[3,3]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"MultiLineString","coordinates":[[[0,2],[1,2],[2,2],[1,1]],[[5,2],[4,2],[3,3],[4,4],[5,5],[6,6]]],"crs":null} }
-{ "Union": {"type":"LineString","coordinates":[[0,0],[0,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"Point","coordinates":[0,0],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"LineString","coordinates":[[0,0],[0,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Union": {"type":"LineString","coordinates":[[1,1],[-1,-1],[2,3.5],[1,3],[1,2],[2,1]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"Point","coordinates":[-1,-1],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"LineString","coordinates":[[1,1],[-1,-1],[2,3.5],[1,3],[1,2],[2,1]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Union": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[0,2],[1,2],[2,2]],[[2,2],[3,2],[4,2],[5,2]],[[2,2],[3,3],[4,4],[5,5],[6,6]]],"crs":null}, "Intersection": {"type":"Point","coordinates":[2,2],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[0,2],[1,2],[2,2]],[[2,2],[3,2],[4,2],[5,2]],[[2,2],[3,3],[4,4],[5,5],[6,6]]],"crs":null} }
-{ "Union": {"type":"LineString","coordinates":[[1,1],[2,2],[3,3],[4,4],[5,5],[6,6]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"MultiPolygon","coordinates":[],"crs":null}, "SymDifference": {"type":"LineString","coordinates":[[1,1],[2,2],[3,3],[4,4],[5,5],[6,6]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Union": {"type":"LineString","coordinates":[[0,0],[5,5],[10,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"LineString","coordinates":[[0,0],[5,5],[10,10]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"MultiLineString","coordinates":[],"crs":null} }
-{ "Union": {"type":"MultiLineString","coordinates":[[[1,2],[3,4]],[[5,6],[7,8]]],"crs":null}, "Intersection": {"type":"MultiPolygon","coordinates":[],"crs":null}, "SymDifference": {"type":"MultiLineString","coordinates":[[[1,2],[3,4]],[[5,6],[7,8]]],"crs":null} }
-{ "Union": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"Point","coordinates":[0,2],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
-{ "Union": {"type":"Polygon","coordinates":[[[0,0],[1,0],[1,1],[0,1],[0,0]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "Intersection": {"type":"Polygon","coordinates":[[[0.25,0.25],[0.5,0.25],[0.5,0.5],[0.25,0.5],[0.25,0.25]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}, "SymDifference": {"type":"Polygon","coordinates":[[[0,0],[1,0],[1,1],[0,1],[0,0]],[[0.25,0.25],[0.25,0.5],[0.5,0.5],[0.5,0.25],[0.25,0.25]]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}} }
+{ "Union": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]]}, "Intersection": {"type":"Point","coordinates":[0,2]}, "SymDifference": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]]} }
+{ "Union": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[2,2],[3,3]],[[3,3],[4,4],[5,5],[6,6]],[[0,2],[1,2],[2,2]],[[3,3],[4,2],[5,2]]]}, "Intersection": {"type":"LineString","coordinates":[[2,2],[3,3]]}, "SymDifference": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[3,3],[4,4],[5,5],[6,6]],[[0,2],[1,2],[2,2]],[[3,3],[4,2],[5,2]]]} }
+{ "Union": {"type":"LineString","coordinates":[[0,0],[0,2]]}, "Intersection": {"type":"Point","coordinates":[0,0]}, "SymDifference": {"type":"LineString","coordinates":[[0,0],[0,2]]} }
+{ "Union": {"type":"MultiLineString","coordinates":[[[1,1],[-1,-1],[1,2]],[[1,2],[2,3.5],[1,3],[1,2]],[[1,2],[2,1]]]}, "Intersection": {"type":"Point","coordinates":[-1,-1]}, "SymDifference": {"type":"MultiLineString","coordinates":[[[1,1],[-1,-1],[1,2]],[[1,2],[2,3.5],[1,3],[1,2]],[[1,2],[2,1]]]} }
+{ "Union": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[2,2],[3,3],[4,4],[5,5],[6,6]],[[0,2],[1,2],[2,2]],[[2,2],[3,2],[4,2],[5,2]]]}, "Intersection": {"type":"Point","coordinates":[2,2]}, "SymDifference": {"type":"MultiLineString","coordinates":[[[1,1],[2,2]],[[2,2],[3,3],[4,4],[5,5],[6,6]],[[0,2],[1,2],[2,2]],[[2,2],[3,2],[4,2],[5,2]]]} }
+{ "Union": {"type":"GeometryCollection","geometries":[{"type":"Point","coordinates":[0,0]},{"type":"LineString","coordinates":[[1,1],[2,2],[3,3],[4,4],[5,5],[6,6]]}]}, "Intersection": , "SymDifference": {"type":"GeometryCollection","geometries":[{"type":"Point","coordinates":[0,0]},{"type":"LineString","coordinates":[[1,1],[2,2],[3,3],[4,4],[5,5],[6,6]]}]} }
+{ "Union": {"type":"MultiLineString","coordinates":[[[0,0],[5,5]],[[5,5],[10,10]]]}, "Intersection": {"type":"MultiLineString","coordinates":[[[0,0],[5,5]],[[5,5],[10,10]]]}, "SymDifference": {"type":"LineString","coordinates":[]} }
+{ "Union": {"type":"MultiLineString","coordinates":[[[1,2],[3,4]],[[5,6],[7,8]]]}, "Intersection": {"type":"LineString","coordinates":[]}, "SymDifference": {"type":"MultiLineString","coordinates":[[[1,2],[3,4]],[[5,6],[7,8]]]} }
+{ "Union": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]]}, "Intersection": {"type":"Point","coordinates":[0,2]}, "SymDifference": {"type":"LineString","coordinates":[[0,0],[1,1],[0,2]]} }
+{ "Union": {"type":"Polygon","coordinates":[[[0,0],[0,1],[1,1],[1,0],[0,0]]]}, "Intersection": {"type":"Polygon","coordinates":[[[0.25,0.25],[0.25,0.5],[0.5,0.5],[0.5,0.25],[0.25,0.25]]]}, "SymDifference": {"type":"Polygon","coordinates":[[[0,0],[0,1],[1,1],[1,0],[0,0]],[[0.25,0.25],[0.5,0.25],[0.5,0.5],[0.25,0.5],[0.25,0.25]]]} }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.05.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.05.adm
index 5123e09..d7e9872 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.05.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/geojson/two-geometries/result.05.adm
@@ -1 +1 @@
-{"type":"MultiPoint","coordinates":[[1,1],[1,2]],"crs":{"type":"name","properties":{"name":"EPSG:4326"}}}
\ No newline at end of file
+{"type":"MultiPoint","coordinates":[[1,1],[1,2]]}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.3.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.3.adm
new file mode 100644
index 0000000..2c44767
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/misc/query-ASTERIXDB-3410/query-ASTERIXDB-3410.3.adm
@@ -0,0 +1,2 @@
+{ "Category 1": 1, "Category 2": 1, "Category 3": 0, "Category 4": 0, "Category 5": 0, "Category 6": 1, "Category 7": 0, "Category 8": 0, "Category 9": 1, "Category 10": 1, "Category 11": 0, "Category 12": 1, "Category 13": 1, "Category 14": 1, "Category 15": 0, "Category 16": 1, "Category 17": 1, "Category 18": 1, "Category 19": 1, "Category 20": 1, "Category 21": 1, "Category 22": 1, "Category 23": 0, "Category 24": 0, "Category 25": 0, "Category 26": 0, "itemid": 10, "description": "ABC" }
+{ "Category 1": 1, "Category 2": 1, "Category 3": 1, "Category 4": 0, "Category 5": 1, "Category 6": 0, "Category 7": 1, "Category 8": 1, "Category 9": 1, "Category 10": 1, "Category 11": 0, "Category 12": 0, "Category 13": 0, "Category 14": 0, "Category 15": 0, "Category 16": 0, "Category 17": 0, "Category 18": 0, "Category 19": 1, "Category 20": 1, "Category 21": 1, "Category 22": 1, "Category 23": 1, "Category 24": 0, "Category 25": 0, "Category 26": 0, "itemid": 12, "description": "XYZ" }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.042.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.042.plan
index 7fe9bcc..6428718 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.042.plan
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.042.plan
@@ -1,72 +1,72 @@
-distribute result [$$70] [cardinality: 6.0, op-cost: 0.0, total-cost: 28.6]
+distribute result [$$70] [cardinality: 6.0, op-cost: 0.0, total-cost: 23.6]
-- DISTRIBUTE_RESULT |UNPARTITIONED|
- exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 28.6]
+ exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 23.6]
-- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- aggregate [$$70] <- [agg-sql-sum($$76)] [cardinality: 6.0, op-cost: 0.0, total-cost: 28.6]
+ aggregate [$$70] <- [agg-sql-sum($$76)] [cardinality: 6.0, op-cost: 0.0, total-cost: 23.6]
-- AGGREGATE |UNPARTITIONED|
- exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 28.6]
+ exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 23.6]
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- aggregate [$$76] <- [agg-sql-count(1)] [cardinality: 6.0, op-cost: 0.0, total-cost: 28.6]
+ aggregate [$$76] <- [agg-sql-count(1)] [cardinality: 6.0, op-cost: 0.0, total-cost: 23.6]
-- AGGREGATE |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 28.6]
+ exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 23.6]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- join (or(eq($$71, "7"), neq($$69, 0))) [cardinality: 6.0, op-cost: 12.6, total-cost: 28.6]
+ join (or(eq($$71, "7"), neq($$69, 0))) [cardinality: 6.0, op-cost: 12.6, total-cost: 23.6]
-- NESTED_LOOP |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- project ([$$71]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ project ([$$71]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_PROJECT |PARTITIONED|
- assign [$$71] <- [$$p.getField("arrayOrObject").getField("text")] [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ assign [$$71] <- [$$p.getField("arrayOrObject").getField("text")] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- ASSIGN |PARTITIONED|
- project ([$$p]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ project ([$$p]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_PROJECT |PARTITIONED|
- assign [$$p] <- [$$b2] [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ assign [$$p] <- [$$b2] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- ASSIGN |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- replicate [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ replicate [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- REPLICATE |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- project ([$$b2]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ project ([$$b2]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_PROJECT |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- data-scan []<-[$$68, $$b2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) [cardinality: 6.0, op-cost: 6.0, total-cost: 6.0]
+ data-scan []<-[$$68, $$b2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) [cardinality: 1.0, op-cost: 1.0, total-cost: 1.0]
-- DATASOURCE_SCAN |PARTITIONED|
exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
- exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- BROADCAST_EXCHANGE |PARTITIONED|
- aggregate [$$69] <- [agg-sum($$75)] [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
+ aggregate [$$69] <- [agg-sum($$75)] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- AGGREGATE |UNPARTITIONED|
- exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- aggregate [$$75] <- [agg-count(1)] [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
+ aggregate [$$75] <- [agg-count(1)] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- AGGREGATE |PARTITIONED|
- select (eq($$ao.getField("text"), "1")) [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
+ select (eq($$ao.getField("text"), "1")) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_SELECT |PARTITIONED|
- project ([$$ao]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ project ([$$ao]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_PROJECT |PARTITIONED|
- unnest $$ao <- scan-collection($$72) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ unnest $$ao <- scan-collection($$72) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- UNNEST |PARTITIONED|
- project ([$$72]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ project ([$$72]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_PROJECT |PARTITIONED|
- assign [$$72] <- [$$b2.getField("arrayOrObject")] [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ assign [$$72] <- [$$b2.getField("arrayOrObject")] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- ASSIGN |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- replicate [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ replicate [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- REPLICATE |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- project ([$$b2]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ project ([$$b2]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_PROJECT |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- data-scan []<-[$$68, $$b2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) [cardinality: 6.0, op-cost: 6.0, total-cost: 6.0]
+ data-scan []<-[$$68, $$b2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) [cardinality: 1.0, op-cost: 1.0, total-cost: 1.0]
-- DATASOURCE_SCAN |PARTITIONED|
exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.062.plan b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.062.plan
index 70f874e..345fc12 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.062.plan
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/column/filter/subplan/subplan.062.plan
@@ -1,8 +1,8 @@
-distribute result [$$63] [cardinality: 2.1, op-cost: 0.0, total-cost: 20.41]
+distribute result [$$63] [cardinality: 2.1, op-cost: 0.0, total-cost: 15.41]
-- DISTRIBUTE_RESULT |PARTITIONED|
- exchange [cardinality: 2.1, op-cost: 0.0, total-cost: 20.41]
+ exchange [cardinality: 2.1, op-cost: 0.0, total-cost: 15.41]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- join (true) [cardinality: 2.1, op-cost: 4.41, total-cost: 20.41]
+ join (true) [cardinality: 2.1, op-cost: 4.41, total-cost: 15.41]
-- NESTED_LOOP |PARTITIONED|
exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
@@ -10,59 +10,59 @@
-- STREAM_PROJECT |PARTITIONED|
select (eq($$p.getField("arrayOrObject").getField("text"), "7")) [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
-- STREAM_SELECT |PARTITIONED|
- project ([$$p]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ project ([$$p]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_PROJECT |PARTITIONED|
- assign [$$p] <- [$$p2] [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ assign [$$p] <- [$$p2] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- ASSIGN |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ exchange [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- replicate [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ replicate [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- REPLICATE |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- project ([$$p2]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ project ([$$p2]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_PROJECT |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- data-scan []<-[$$62, $$p2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) range-filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) [cardinality: 6.0, op-cost: 6.0, total-cost: 6.0]
+ data-scan []<-[$$62, $$p2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) range-filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) [cardinality: 1.0, op-cost: 1.0, total-cost: 1.0]
-- DATASOURCE_SCAN |PARTITIONED|
exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
empty-tuple-source [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
- exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- BROADCAST_EXCHANGE |PARTITIONED|
- aggregate [$$63] <- [agg-sql-sum($$67)] [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
+ aggregate [$$63] <- [agg-sql-sum($$67)] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- AGGREGATE |UNPARTITIONED|
- exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- aggregate [$$67] <- [agg-sql-count(1)] [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
+ aggregate [$$67] <- [agg-sql-count(1)] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- AGGREGATE |PARTITIONED|
- select (and(ge($$60, "1"), le($$60, "2"))) [cardinality: 1.0, op-cost: 0.0, total-cost: 6.0]
+ select (and(ge($$60, "1"), le($$60, "2"))) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_SELECT |PARTITIONED|
- project ([$$60]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ project ([$$60]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_PROJECT |PARTITIONED|
- assign [$$60] <- [$$ao.getField("text")] [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ assign [$$60] <- [$$ao.getField("text")] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- ASSIGN |PARTITIONED|
- project ([$$ao]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ project ([$$ao]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_PROJECT |PARTITIONED|
- unnest $$ao <- scan-collection($$65) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ unnest $$ao <- scan-collection($$65) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- UNNEST |PARTITIONED|
- project ([$$65]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ project ([$$65]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_PROJECT |PARTITIONED|
- assign [$$65] <- [$$p2.getField("arrayOrObject")] [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ assign [$$65] <- [$$p2.getField("arrayOrObject")] [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- ASSIGN |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- replicate [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ replicate [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- REPLICATE |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- project ([$$p2]) [cardinality: 6.0, op-cost: 0.0, total-cost: 6.0]
+ project ([$$p2]) [cardinality: 1.0, op-cost: 0.0, total-cost: 1.0]
-- STREAM_PROJECT |PARTITIONED|
- exchange [cardinality: 6.0, op-cost: 4.0, total-cost: 10.0]
+ exchange [cardinality: 1.0, op-cost: 4.0, total-cost: 5.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- data-scan []<-[$$62, $$p2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) range-filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) [cardinality: 6.0, op-cost: 6.0, total-cost: 6.0]
+ data-scan []<-[$$62, $$p2] <- test.ColumnDataset project ({arrayOrObject:<[{text:any}],{text:any}>}) filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) range-filter on: or(eq($$p2.getField("arrayOrObject").getField("text"), "7"), and(ge(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "1"), le(scan-collection($$p2.getField("arrayOrObject")).getField("text"), "2"))) [cardinality: 1.0, op-cost: 1.0, total-cost: 1.0]
-- DATASOURCE_SCAN |PARTITIONED|
exchange [cardinality: 0.0, op-cost: 0.0, total-cost: 0.0]
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.03.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.03.regexadm
index 0717e21..69842df 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.03.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.03.regexadm
@@ -3,21 +3,7 @@
\s*\Q"signature": {\E
\s*\Q"*": "*"\E
\s*\Q},\E
-\s*\Q"results": [ { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q]\E
+\s*\Q"results": [ {"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null} ]\E
\s*\Q,\E
\s*\Q"plans":{},\E
\s*\Q"warnings": [{\E\s*
@@ -40,4 +26,4 @@
\s*\Q"bufferCachePageReadCount": \E[0-9]+\Q,\E
\s*\Q"warningCount": 12\E
\s*\Q}\E
-\s*\Q}\E\s*
\ No newline at end of file
+\s*\Q}\E\s*
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.04.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.04.regexadm
index 7c0d2e7..245357d 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.04.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.04.regexadm
@@ -3,21 +3,7 @@
\s*\Q"signature": {\E
\s*\Q"*": "*"\E
\s*\Q},\E
-\s*\Q"results": [ { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q]\E
+\s*\Q"results": [ {"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null} ]\E
\s*\Q,\E
\s*\Q"plans":{},\E
\s*\Q"status": "success",\E
@@ -33,4 +19,4 @@
\s*\Q"bufferCachePageReadCount": \E[0-9]+\Q,\E
\s*\Q"warningCount": 10\E
\s*\Q}\E
-\s*\Q}\E\s*
\ No newline at end of file
+\s*\Q}\E\s*
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.05.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.05.regexadm
index 49d65ff..28a9b78 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.05.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.05.regexadm
@@ -3,21 +3,7 @@
\s*\Q"signature": {\E
\s*\Q"*": "*"\E
\s*\Q},\E
-\s*\Q"results": [ { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q]\E
+\s*\Q"results": [ {"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null} ]\E
\s*\Q,\E
\s*\Q"plans":{},\E
\s*\Q"status": "success",\E
@@ -33,4 +19,4 @@
\s*\Q"bufferCachePageReadCount": \E[0-9]+\Q,\E
\s*\Q"warningCount": 11\E
\s*\Q}\E
-\s*\Q}\E\s*
\ No newline at end of file
+\s*\Q}\E\s*
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.07.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.07.regexadm
index 49d65ff..28a9b78 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.07.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.07.regexadm
@@ -3,21 +3,7 @@
\s*\Q"signature": {\E
\s*\Q"*": "*"\E
\s*\Q},\E
-\s*\Q"results": [ { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q]\E
+\s*\Q"results": [ {"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null} ]\E
\s*\Q,\E
\s*\Q"plans":{},\E
\s*\Q"status": "success",\E
@@ -33,4 +19,4 @@
\s*\Q"bufferCachePageReadCount": \E[0-9]+\Q,\E
\s*\Q"warningCount": 11\E
\s*\Q}\E
-\s*\Q}\E\s*
\ No newline at end of file
+\s*\Q}\E\s*
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.08.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.08.regexadm
index 0717e21..69842df 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.08.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_cbo/warnings/warnings-limit/warnings-limit.08.regexadm
@@ -3,21 +3,7 @@
\s*\Q"signature": {\E
\s*\Q"*": "*"\E
\s*\Q},\E
-\s*\Q"results": [ { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": false }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q, { "F1": { "a": 1 }, "F2": null }\E
-\s*\Q]\E
+\s*\Q"results": [ {"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":false},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null},{"F1":{"a":1},"F2":null} ]\E
\s*\Q,\E
\s*\Q"plans":{},\E
\s*\Q"warnings": [{\E\s*
@@ -40,4 +26,4 @@
\s*\Q"bufferCachePageReadCount": \E[0-9]+\Q,\E
\s*\Q"warningCount": 12\E
\s*\Q}\E
-\s*\Q}\E\s*
\ No newline at end of file
+\s*\Q}\E\s*
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/sqlpp_queries.xml b/asterixdb/asterix-app/src/test/resources/runtimets/sqlpp_queries.xml
index 39b667d..2bbc5a2 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/sqlpp_queries.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/sqlpp_queries.xml
@@ -626,6 +626,36 @@
</compilation-unit>
</test-case>
<test-case FilePath="aggregate">
+ <compilation-unit name="countn_01">
+ <output-dir compare="Text">countn_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate">
+ <compilation-unit name="countn_02">
+ <output-dir compare="Text">countn_02</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate">
+ <compilation-unit name="countn_empty_01">
+ <output-dir compare="Text">countn_empty_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate">
+ <compilation-unit name="countn_empty_02">
+ <output-dir compare="Text">countn_empty_02</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate">
+ <compilation-unit name="countn_null">
+ <output-dir compare="Text">countn_null</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate">
+ <compilation-unit name="countn_distinct">
+ <output-dir compare="Text">countn_distinct</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate">
<compilation-unit name="kurtosis_double">
<output-dir compare="Text">kurtosis_double</output-dir>
</compilation-unit>
@@ -761,6 +791,21 @@
</compilation-unit>
</test-case>
<test-case FilePath="aggregate">
+ <compilation-unit name="scalar_countn">
+ <output-dir compare="Text">scalar_countn</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate">
+ <compilation-unit name="scalar_countn_empty">
+ <output-dir compare="Text">scalar_countn_empty</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate">
+ <compilation-unit name="scalar_countn_null">
+ <output-dir compare="Text">scalar_countn_null</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate">
<compilation-unit name="scalar_kurtosis">
<output-dir compare="Text">scalar_kurtosis</output-dir>
</compilation-unit>
@@ -1904,6 +1949,36 @@
</compilation-unit>
</test-case>
<test-case FilePath="aggregate-sql">
+ <compilation-unit name="countn_01">
+ <output-dir compare="Text">countn_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate-sql">
+ <compilation-unit name="countn_02">
+ <output-dir compare="Text">countn_02</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate-sql">
+ <compilation-unit name="countn_empty_01">
+ <output-dir compare="Text">countn_empty_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate-sql">
+ <compilation-unit name="countn_empty_02">
+ <output-dir compare="Text">countn_empty_02</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate-sql">
+ <compilation-unit name="countn_null">
+ <output-dir compare="Text">countn_null</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate-sql">
+ <compilation-unit name="countn_distinct">
+ <output-dir compare="Text">countn_distinct</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate-sql">
<compilation-unit name="kurtosis_double">
<output-dir compare="Text">kurtosis_double</output-dir>
</compilation-unit>
@@ -2039,6 +2114,21 @@
</compilation-unit>
</test-case>
<test-case FilePath="aggregate-sql">
+ <compilation-unit name="scalar_countn">
+ <output-dir compare="Text">scalar_countn</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate-sql">
+ <compilation-unit name="scalar_countn_empty">
+ <output-dir compare="Text">scalar_countn_empty</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate-sql">
+ <compilation-unit name="scalar_countn_null">
+ <output-dir compare="Text">scalar_countn_null</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="aggregate-sql">
<compilation-unit name="scalar_kurtosis">
<output-dir compare="Text">scalar_kurtosis</output-dir>
</compilation-unit>
@@ -3041,6 +3131,11 @@
</compilation-unit>
</test-case>
<test-case FilePath="array_fun">
+ <compilation-unit name="array_slice/array_slice_bracket_notation">
+ <output-dir compare="Text">array_slice/array_slice_bracket_notation</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="array_fun">
<compilation-unit name="array_slice/array_slice_double_argument">
<output-dir compare="Text">array_slice/array_slice_double_argument</output-dir>
</compilation-unit>
@@ -7421,6 +7516,11 @@
<output-dir compare="Text">remove_listify</output-dir>
</compilation-unit>
</test-case>
+ <test-case FilePath="misc">
+ <compilation-unit name="query-ASTERIXDB-3410">
+ <output-dir compare="Text">query-ASTERIXDB-3410</output-dir>
+ </compilation-unit>
+ </test-case>
</test-group>
<test-group name="multipart-dataverse">
<test-case FilePath="multipart-dataverse">
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset_s3.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset_s3.xml
index db61282..5501aec 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset_s3.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset_s3.xml
@@ -40,6 +40,31 @@
</compilation-unit>
</test-case>
<test-case FilePath="copy-to">
+ <compilation-unit name="parquet-simple">
+ <output-dir compare="Text">parquet-simple</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="copy-to">
+ <compilation-unit name="parquet-tweet">
+ <output-dir compare="Text">parquet-tweet</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="copy-to">
+ <compilation-unit name="parquet-utf8">
+ <output-dir compare="Text">parquet-utf8</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="copy-to">
+ <compilation-unit name="parquet-cover-data-types">
+ <output-dir compare="Text">parquet-cover-data-types</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="copy-to">
+ <compilation-unit name="parquet-empty-array">
+ <output-dir compare="Text">parquet-empty-array</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="copy-to">
<compilation-unit name="empty-path">
<output-dir compare="Text">empty-path</output-dir>
</compilation-unit>
@@ -80,11 +105,30 @@
<compilation-unit name="supported-adapter-format-compression">
<output-dir compare="Text">supported-adapter-format-compression</output-dir>
<expected-error>ASX1188: Unsupported writing adapter 'AZUREBLOB'. Supported adapters: [gcs, localfs, s3]</expected-error>
- <expected-error>ASX1189: Unsupported writing format 'csv'. Supported formats: [json]</expected-error>
- <expected-error>ASX1096: Unknown compression scheme rar. Supported schemes are [gzip]</expected-error>
+ <expected-error>ASX1189: Unsupported writing format 'csv'. Supported formats: [json, parquet]</expected-error>
+ <expected-error>ASX1202: Unsupported compression scheme rar. Supported schemes for json are [gzip]</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="copy-to/negative">
+ <compilation-unit name="parquet-error-checks">
+ <output-dir compare="Text">parquet-error-checks</output-dir>
+ <expected-error>ASX1079: Compilation error: TYPE() Expression is required for parquet format</expected-error>
+ <expected-error>ASX0037: Type mismatch: expected value of type integer, but got the value of type BINARY</expected-error>
+ <expected-error>HYR0132: Extra field in the result, field 'second' does not exist at 'nested' in the schema</expected-error>
+ <expected-error>HYR0131: Result does not follow the schema, group type expected but found primitive type at 'nested'</expected-error>
+ <expected-error>HYR0131: Result does not follow the schema, primitive type expected but found group type at 'name'</expected-error>
+ <expected-error>ASX1201: Storage units expected for the field 'row-group-size' (e.g., 0.1KB, 100kb, 1mb, 3MB, 8.5GB ...). Provided 'random'</expected-error>
+ <expected-error>ASX1201: Storage units expected for the field 'page-size' (e.g., 0.1KB, 100kb, 1mb, 3MB, 8.5GB ...). Provided 'random'</expected-error>
+ <expected-error>ASX1202: Unsupported compression scheme rar. Supported schemes for parquet are [gzip, snappy, zstd]</expected-error>
+ <expected-error>ASX1001: Syntax error</expected-error>
+ <expected-error>ASX1204: 'binary' type not supported in parquet format</expected-error>
+ <expected-error>ASX1205: Invalid Parquet Writer Version provided '3'. Supported values: [1, 2]</expected-error>
+ <expected-error>ASX1001: Syntax error</expected-error>
+ <expected-error>ASX1001: Syntax error</expected-error>
+ <expected-error>ASX1001: Syntax error</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="copy-to/negative">
<compilation-unit name="empty-over">
<output-dir compare="Text">empty-over</output-dir>
<expected-error>ASX1001: Syntax error: OVER-clause cannot be empty</expected-error>
diff --git a/asterixdb/asterix-cloud/pom.xml b/asterixdb/asterix-cloud/pom.xml
index 659a6d7..9244979 100644
--- a/asterixdb/asterix-cloud/pom.xml
+++ b/asterixdb/asterix-cloud/pom.xml
@@ -16,181 +16,258 @@
! specific language governing permissions and limitations
! under the License.
!-->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <artifactId>apache-asterixdb</artifactId>
- <groupId>org.apache.asterix</groupId>
- <version>0.9.10-SNAPSHOT</version>
- </parent>
- <artifactId>asterix-cloud</artifactId>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>apache-asterixdb</artifactId>
+ <groupId>org.apache.asterix</groupId>
+ <version>0.9.10-SNAPSHOT</version>
+ </parent>
+ <artifactId>asterix-cloud</artifactId>
- <licenses>
- <license>
- <name>Apache License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- <comments>A business-friendly OSS license</comments>
- </license>
- </licenses>
+ <licenses>
+ <license>
+ <name>Apache License, Version 2.0</name>
+ <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+ <distribution>repo</distribution>
+ <comments>A business-friendly OSS license</comments>
+ </license>
+ </licenses>
- <properties>
- <root.dir>${basedir}/..</root.dir>
- </properties>
+ <properties>
+ <root.dir>${basedir}/..</root.dir>
+ </properties>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.rat</groupId>
- <artifactId>apache-rat-plugin</artifactId>
- <executions>
- <execution>
- <id>default</id>
- <phase>validate</phase>
- <goals>
- <goal>check</goal>
- </goals>
- <configuration>
- <licenses>
- <license implementation="org.apache.rat.analysis.license.ApacheSoftwareLicense20"/>
- </licenses>
- <excludes combine.children="append">
- <exclude>src/test/resources/result/**</exclude>
- </excludes>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>com.googlecode.maven-download-plugin</groupId>
- <artifactId>download-maven-plugin</artifactId>
- <version>1.4.2</version>
- <executions>
- <execution>
- <id>install-fake-gcs</id>
- <phase>${gcs.download.stage}</phase>
- <goals>
- <goal>wget</goal>
- </goals>
- <configuration>
- <url>https://github.com/fsouza/fake-gcs-server/releases/download/v1.48.0/fake-gcs-server_1.48.0_Linux_amd64.tar.gz</url>
- <outputFileName>fake-gcs-server_1.48.0_Linux_amd64.tar.gz</outputFileName>
- <outputDirectory>${project.build.directory}</outputDirectory>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
- <executions>
- <execution>
- <id>extract-gcs</id>
- <phase>${gcs.install.stage}</phase>
- <configuration>
- <target>
- <echo message="Extracting fake-gcs-server" />
- <mkdir dir="${project.build.directory}/fake-gcs-server" />
- <gunzip src="${project.build.directory}/fake-gcs-server_1.48.0_Linux_amd64.tar.gz" dest="${project.build.directory}/fake-gcs-server_1.48.0_Linux_amd64.tar" />
- <untar src="${project.build.directory}/fake-gcs-server_1.48.0_Linux_amd64.tar" dest="${project.build.directory}/fake-gcs-server" />
- <chmod file="${project.build.directory}/fake-gcs-server/fake-gcs-server" perm="ugo+rx" />
- </target>
- </configuration>
- <goals>
- <goal>run</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>exec-maven-plugin</artifactId>
- <executions>
- <execution>
- <id>fake-gcs-server</id>
- <phase>${gcs.stage}</phase>
- <goals>
- <goal>exec</goal>
- </goals>
- <configuration>
- <executable>${project.build.directory}/fake-gcs-server/fake-gcs-server</executable>
- <workingDirectory>${project.build.directory}/fake-gcs-server</workingDirectory>
- <arguments>
- <argument>-port</argument>
- <argument>4443</argument>
- <argument>-scheme</argument>
- <argument>http</argument>
- <argument>-host</argument>
- <argument>127.0.0.1</argument>
- <argument>-log-level</argument>
- <argument>error</argument>
- <argument>-filesystem-root</argument>
- <argument>${project.build.directory}/fake-gcs-server/storage</argument>
- </arguments>
- <async>true</async>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.rat</groupId>
+ <artifactId>apache-rat-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>default</id>
+ <phase>validate</phase>
+ <goals>
+ <goal>check</goal>
+ </goals>
+ <configuration>
+ <licenses>
+ <license implementation="org.apache.rat.analysis.license.ApacheSoftwareLicense20"/>
+ </licenses>
+ <excludes combine.children="append">
+ <exclude>src/test/resources/result/**</exclude>
+ </excludes>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>com.googlecode.maven-download-plugin</groupId>
+ <artifactId>download-maven-plugin</artifactId>
+ <version>1.4.2</version>
+ <executions>
+ <execution>
+ <id>install-fake-gcs</id>
+ <phase>${gcs.download.stage}</phase>
+ <goals>
+ <goal>wget</goal>
+ </goals>
+ <configuration>
+ <url>
+ https://github.com/fsouza/fake-gcs-server/releases/download/v1.48.0/fake-gcs-server_1.48.0_Linux_amd64.tar.gz
+ </url>
+ <outputFileName>fake-gcs-server_1.48.0_Linux_amd64.tar.gz</outputFileName>
+ <outputDirectory>${project.build.directory}</outputDirectory>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>extract-gcs</id>
+ <phase>${gcs.install.stage}</phase>
+ <configuration>
+ <target>
+ <echo message="Extracting fake-gcs-server"/>
+ <mkdir dir="${project.build.directory}/fake-gcs-server"/>
+ <gunzip src="${project.build.directory}/fake-gcs-server_1.48.0_Linux_amd64.tar.gz"
+ dest="${project.build.directory}/fake-gcs-server_1.48.0_Linux_amd64.tar"/>
+ <untar src="${project.build.directory}/fake-gcs-server_1.48.0_Linux_amd64.tar"
+ dest="${project.build.directory}/fake-gcs-server"/>
+ <chmod file="${project.build.directory}/fake-gcs-server/fake-gcs-server" perm="ugo+rx"/>
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>exec-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>fake-gcs-server</id>
+ <phase>${gcs.stage}</phase>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ <configuration>
+ <executable>${project.build.directory}/fake-gcs-server/fake-gcs-server</executable>
+ <workingDirectory>${project.build.directory}/fake-gcs-server</workingDirectory>
+ <arguments>
+ <argument>-port</argument>
+ <argument>4443</argument>
+ <argument>-scheme</argument>
+ <argument>http</argument>
+ <argument>-host</argument>
+ <argument>127.0.0.1</argument>
+ <argument>-log-level</argument>
+ <argument>error</argument>
+ <argument>-filesystem-root</argument>
+ <argument>${project.build.directory}/fake-gcs-server/storage</argument>
+ </arguments>
+ <async>true</async>
+ </configuration>
+ </execution>
+ <execution>
+ <id>azurite</id>
+ <phase>${azurite.stage}</phase>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ <configuration>
+ <!--suppress UnresolvedMavenProperty -->
+ <executable>${project.build.directory}/npm/node_modules/.bin/azurite-blob</executable>
+ <workingDirectory>${project.build.directory}</workingDirectory>
+ <environmentVariables>
+ <PATH>${project.build.directory}/npm/node</PATH>
+ </environmentVariables>
+ <arguments>
+ <argument>--blobPort</argument>
+ <argument>15055</argument>
+ <argument>--location</argument>
+ <argument>${project.build.directory}/azurite</argument>
+ <argument>--debug</argument>
+ <argument>${project.build.directory}/azurite/logs/azurite-debug.log</argument>
+ </arguments>
+ <async>true</async>
+ <outputFile>${project.build.directory}/azurite/logs/azurite.log</outputFile>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
- <dependencies>
- <dependency>
- <groupId>org.apache.hyracks</groupId>
- <artifactId>hyracks-cloud</artifactId>
- <version>${hyracks.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.asterix</groupId>
- <artifactId>asterix-common</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.asterix</groupId>
- <artifactId>asterix-external-data</artifactId>
- <version>${project.version}</version>
- </dependency>
- <!-- aws s3 start -->
- <dependency>
- <groupId>software.amazon.awssdk</groupId>
- <artifactId>sdk-core</artifactId>
- </dependency>
- <dependency>
- <groupId>software.amazon.awssdk</groupId>
- <artifactId>s3</artifactId>
- </dependency>
- <dependency>
- <groupId>software.amazon.awssdk</groupId>
- <artifactId>regions</artifactId>
- </dependency>
- <dependency>
- <groupId>software.amazon.awssdk</groupId>
- <artifactId>auth</artifactId>
- </dependency>
- <dependency>
- <groupId>software.amazon.awssdk</groupId>
- <artifactId>s3-transfer-manager</artifactId>
- </dependency>
- <dependency>
- <groupId>software.amazon.awssdk.crt</groupId>
- <artifactId>aws-crt</artifactId>
- </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>io.findify</groupId>
- <artifactId>s3mock_2.12</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.typesafe.akka</groupId>
- <artifactId>akka-http-core_2.12</artifactId>
- <scope>test</scope>
- </dependency>
- <!-- aws s3 end -->
- </dependencies>
+ <profiles>
+ <profile>
+ <id>azurite-tests</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>com.github.eirslett</groupId>
+ <artifactId>frontend-maven-plugin</artifactId>
+ <version>1.13.4</version>
+ <configuration>
+ <nodeVersion>v14.15.4</nodeVersion>
+ <npmVersion>6.14.11</npmVersion>
+ <workingDirectory>target/npm</workingDirectory>
+ <installDirectory>target/npm</installDirectory>
+ </configuration>
+ <executions>
+ <execution>
+ <id>install node and yarn</id>
+ <goals>
+ <goal>install-node-and-npm</goal>
+ </goals>
+ <phase>${azurite.npm.install.stage}</phase>
+ </execution>
+ <execution>
+ <id>azurite blob</id>
+ <phase>${azurite.install.stage}</phase>
+ <goals>
+ <goal>npm</goal>
+ </goals>
+ <configuration>
+ <arguments>install azurite</arguments>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hyracks</groupId>
+ <artifactId>hyracks-cloud</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.asterix</groupId>
+ <artifactId>asterix-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.asterix</groupId>
+ <artifactId>asterix-external-data</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- aws s3 start -->
+ <dependency>
+ <groupId>software.amazon.awssdk</groupId>
+ <artifactId>sdk-core</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>software.amazon.awssdk</groupId>
+ <artifactId>s3</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>software.amazon.awssdk</groupId>
+ <artifactId>regions</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>software.amazon.awssdk</groupId>
+ <artifactId>auth</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>software.amazon.awssdk</groupId>
+ <artifactId>s3-transfer-manager</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>software.amazon.awssdk.crt</groupId>
+ <artifactId>aws-crt</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>io.findify</groupId>
+ <artifactId>s3mock_2.12</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.typesafe.akka</groupId>
+ <artifactId>akka-http-core_2.12</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <!-- aws s3 end -->
+
+ <dependency>
+ <groupId>com.azure</groupId>
+ <artifactId>azure-storage-blob-batch</artifactId>
+ <version>12.23.0</version>
+ </dependency>
+
+ </dependencies>
</project>
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/CloudClientProvider.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/CloudClientProvider.java
index ee43a2c..c98c6b4 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/CloudClientProvider.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/CloudClientProvider.java
@@ -20,6 +20,8 @@
import org.apache.asterix.cloud.clients.aws.s3.S3ClientConfig;
import org.apache.asterix.cloud.clients.aws.s3.S3CloudClient;
+import org.apache.asterix.cloud.clients.azure.blobstorage.AzBlobStorageClientConfig;
+import org.apache.asterix.cloud.clients.azure.blobstorage.AzBlobStorageCloudClient;
import org.apache.asterix.cloud.clients.google.gcs.GCSClientConfig;
import org.apache.asterix.cloud.clients.google.gcs.GCSCloudClient;
import org.apache.asterix.common.config.CloudProperties;
@@ -30,6 +32,7 @@
private static final boolean UNSTABLE = isUnstable();
public static final String S3 = "s3";
public static final String GCS = "gs";
+ public static final String AZ_BLOB = "azblob";
private CloudClientProvider() {
throw new AssertionError("do not instantiate");
@@ -45,6 +48,9 @@
} else if (GCS.equalsIgnoreCase(storageScheme)) {
GCSClientConfig config = GCSClientConfig.of(cloudProperties);
cloudClient = new GCSCloudClient(config, guardian);
+ } else if (AZ_BLOB.equalsIgnoreCase(storageScheme)) {
+ AzBlobStorageClientConfig config = AzBlobStorageClientConfig.of(cloudProperties);
+ cloudClient = new AzBlobStorageCloudClient(config, guardian);
} else {
throw new IllegalStateException("unsupported cloud storage scheme: " + storageScheme);
}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageBufferedWriter.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageBufferedWriter.java
new file mode 100644
index 0000000..2a79c86
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageBufferedWriter.java
@@ -0,0 +1,166 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.cloud.clients.azure.blobstorage;
+
+import java.io.BufferedInputStream;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Base64;
+import java.util.List;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.asterix.cloud.clients.ICloudBufferedWriter;
+import org.apache.asterix.cloud.clients.ICloudGuardian;
+import org.apache.asterix.cloud.clients.profiler.IRequestProfilerLimiter;
+import org.apache.commons.io.IOUtils;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import com.azure.core.util.BinaryData;
+import com.azure.storage.blob.BlobClient;
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.models.BlobStorageException;
+import com.azure.storage.blob.specialized.BlockBlobClient;
+
+public class AzBlobStorageBufferedWriter implements ICloudBufferedWriter {
+ private static final String PUT_UPLOAD_ID = "putUploadId";
+ private static final int MAX_RETRIES = 3;
+ private static final Logger LOGGER = LogManager.getLogger();
+ private final List<String> blockIDArrayList;
+ private final ICloudGuardian guardian;
+ private int blockNumber;
+ private final String path;
+ private String uploadID;
+
+ private final BlobContainerClient blobContainerClient;
+
+ private final IRequestProfilerLimiter profiler;
+
+ private final String bucket;
+
+ public AzBlobStorageBufferedWriter(BlobContainerClient blobContainerClient, IRequestProfilerLimiter profiler,
+ ICloudGuardian guardian, String bucket, String path) {
+ this.blobContainerClient = blobContainerClient;
+ this.profiler = profiler;
+ this.guardian = guardian;
+ this.bucket = bucket;
+ this.path = path;
+ this.blockIDArrayList = new ArrayList<>();
+ }
+
+ @Override
+ public void upload(InputStream stream, int length) {
+ profiler.objectMultipartUpload();
+ if (length <= 0) {
+ String errMsg = String.format("A block with size %d cannot be staged for upload", length);
+ LOGGER.error(errMsg);
+ throw new IllegalArgumentException(errMsg);
+ }
+ guardian.checkIsolatedWriteAccess(bucket, path);
+ try {
+ BlockBlobClient blockBlobClient = blobContainerClient.getBlobClient(path).getBlockBlobClient();
+ BufferedInputStream bufferedInputStream = IOUtils.buffer(stream, length);
+ String blockID =
+ Base64.getEncoder().encodeToString(UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8));
+ initBlockBlobUploads(blockID);
+ blockIDArrayList.add(blockID);
+ blockBlobClient.stageBlock(blockID, bufferedInputStream, length);
+ } catch (Exception e) {
+ LOGGER.error("Error while uploading blocks of data: {}", e.getMessage());
+ throw new RuntimeException(e);
+ }
+ blockNumber++;
+ }
+
+ private void initBlockBlobUploads(String blockID) {
+ if (this.uploadID == null) {
+ this.uploadID = blockID;
+ this.blockNumber = 1;
+ }
+ }
+
+ @Override
+ public void uploadLast(InputStream stream, ByteBuffer buffer) throws HyracksDataException {
+ if (uploadID == null) {
+ profiler.objectWrite();
+ BlobClient blobClient = blobContainerClient.getBlobClient(path);
+ BinaryData binaryData = BinaryData.fromBytes(getDataFromBuffer(buffer));
+ blobClient.upload(binaryData);
+ uploadID = PUT_UPLOAD_ID; // uploadID should be updated if the put-object operation succeeds
+ } else {
+ upload(stream, buffer.limit());
+ }
+ }
+
+ private byte[] getDataFromBuffer(ByteBuffer buffer) {
+ byte[] data = new byte[buffer.limit()];
+ buffer.get(data, 0, buffer.limit());
+ return data;
+ }
+
+ @Override
+ public boolean isEmpty() {
+ return this.uploadID == null;
+ }
+
+ @Override
+ public void finish() throws HyracksDataException {
+ if (this.uploadID == null) {
+ throw new IllegalStateException("Cannot finish without writing any bytes");
+ } else if (PUT_UPLOAD_ID.equals(uploadID)) {
+ return;
+ }
+ int currRetryAttempt = 0;
+ BlockBlobClient blockBlobClient = blobContainerClient.getBlobClient(path).getBlockBlobClient();
+ while (true) {
+ try {
+ guardian.checkWriteAccess(bucket, path);
+ profiler.objectMultipartUpload();
+ blockBlobClient.commitBlockList(blockIDArrayList);
+ break;
+ } catch (BlobStorageException e) {
+ currRetryAttempt++;
+ if (currRetryAttempt == MAX_RETRIES) {
+ throw HyracksDataException.create(e);
+ }
+ LOGGER.info(() -> "AzBlob storage write retry, encountered: " + e.getMessage());
+
+ // Backoff for 1 sec for the first 2 retries, and 2 seconds from there onward
+ try {
+ Thread.sleep(TimeUnit.SECONDS.toMillis(currRetryAttempt < 2 ? 1 : 2));
+ } catch (InterruptedException ex) {
+ Thread.currentThread().interrupt();
+ throw HyracksDataException.create(ex);
+ }
+ }
+ }
+ }
+
+ @Override
+ public void abort() throws HyracksDataException {
+ // Todo: As of the current Azure Java SDK, it does not support aborting a staged or under-upload block.
+ // https://github.com/Azure/azure-sdk-for-java/issues/31150
+ LOGGER.warn("Multipart upload for {} was aborted", path);
+ }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageClientConfig.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageClientConfig.java
new file mode 100644
index 0000000..9aedfc3
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageClientConfig.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.cloud.clients.azure.blobstorage;
+
+import java.util.Objects;
+
+import org.apache.asterix.common.config.CloudProperties;
+
+import com.azure.identity.DefaultAzureCredential;
+import com.azure.identity.DefaultAzureCredentialBuilder;
+
+public class AzBlobStorageClientConfig {
+ private final int writeBufferSize;
+ // Ref: https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch?tabs=microsoft-entra-id
+ static final int DELETE_BATCH_SIZE = 256;
+ private final String region;
+ private final String endpoint;
+ private final String prefix;
+
+ private final boolean anonymousAuth;
+ private final long profilerLogInterval;
+ private final String bucket;
+ private final long tokenAcquireTimeout;
+ private final int writeMaxRequestsPerSeconds;
+ private final int readMaxRequestsPerSeconds;
+
+ public AzBlobStorageClientConfig(String region, String endpoint, String prefix, boolean anonymousAuth,
+ long profilerLogInterval, String bucket, long tokenAcquireTimeout, int writeMaxRequestsPerSeconds,
+ int readMaxRequestsPerSeconds, int writeBufferSize) {
+ this.region = Objects.requireNonNull(region, "region");
+ this.endpoint = endpoint;
+ this.prefix = Objects.requireNonNull(prefix, "prefix");
+ this.anonymousAuth = anonymousAuth;
+ this.profilerLogInterval = profilerLogInterval;
+ this.bucket = bucket;
+ this.tokenAcquireTimeout = tokenAcquireTimeout;
+ this.writeMaxRequestsPerSeconds = writeMaxRequestsPerSeconds;
+ this.readMaxRequestsPerSeconds = readMaxRequestsPerSeconds;
+ this.writeBufferSize = writeBufferSize;
+ }
+
+ public static AzBlobStorageClientConfig of(CloudProperties cloudProperties) {
+ return new AzBlobStorageClientConfig(cloudProperties.getStorageRegion(), cloudProperties.getStorageEndpoint(),
+ cloudProperties.getStoragePrefix(), cloudProperties.isStorageAnonymousAuth(),
+ cloudProperties.getProfilerLogInterval(), cloudProperties.getStorageBucket(),
+ cloudProperties.getTokenAcquireTimeout(), cloudProperties.getWriteMaxRequestsPerSecond(),
+ cloudProperties.getReadMaxRequestsPerSecond(), cloudProperties.getWriteBufferSize());
+ }
+
+ public String getRegion() {
+ return region;
+ }
+
+ public String getEndpoint() {
+ return endpoint;
+ }
+
+ public String getPrefix() {
+ return prefix;
+ }
+
+ public String getBucket() {
+ return bucket;
+ }
+
+ public long getProfilerLogInterval() {
+ return profilerLogInterval;
+ }
+
+ public boolean isAnonymousAuth() {
+ return anonymousAuth;
+ }
+
+ public DefaultAzureCredential createCredentialsProvider() {
+ return new DefaultAzureCredentialBuilder().build();
+ }
+
+ public long getTokenAcquireTimeout() {
+ return tokenAcquireTimeout;
+ }
+
+ public int getWriteMaxRequestsPerSeconds() {
+ return writeMaxRequestsPerSeconds;
+ }
+
+ public int getReadMaxRequestsPerSeconds() {
+ return readMaxRequestsPerSeconds;
+ }
+
+ public int getWriteBufferSize() {
+ return writeBufferSize;
+ }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageCloudClient.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageCloudClient.java
new file mode 100644
index 0000000..b9f9421
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzBlobStorageCloudClient.java
@@ -0,0 +1,402 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.cloud.clients.azure.blobstorage;
+
+import static org.apache.asterix.cloud.clients.azure.blobstorage.AzBlobStorageClientConfig.DELETE_BATCH_SIZE;
+
+import java.io.ByteArrayOutputStream;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.BufferOverflowException;
+import java.nio.ByteBuffer;
+import java.nio.ReadOnlyBufferException;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.apache.asterix.cloud.CloudResettableInputStream;
+import org.apache.asterix.cloud.IWriteBufferProvider;
+import org.apache.asterix.cloud.clients.CloudFile;
+import org.apache.asterix.cloud.clients.ICloudBufferedWriter;
+import org.apache.asterix.cloud.clients.ICloudClient;
+import org.apache.asterix.cloud.clients.ICloudGuardian;
+import org.apache.asterix.cloud.clients.ICloudWriter;
+import org.apache.asterix.cloud.clients.IParallelDownloader;
+import org.apache.asterix.cloud.clients.profiler.CountRequestProfilerLimiter;
+import org.apache.asterix.cloud.clients.profiler.IRequestProfilerLimiter;
+import org.apache.asterix.cloud.clients.profiler.RequestLimiterNoOpProfiler;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.io.FileReference;
+import org.apache.hyracks.control.nc.io.IOManager;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import com.azure.core.http.rest.PagedIterable;
+import com.azure.core.util.BinaryData;
+import com.azure.storage.blob.BlobClient;
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.BlobContainerClientBuilder;
+import com.azure.storage.blob.batch.BlobBatchClient;
+import com.azure.storage.blob.batch.BlobBatchClientBuilder;
+import com.azure.storage.blob.models.BlobErrorCode;
+import com.azure.storage.blob.models.BlobItem;
+import com.azure.storage.blob.models.BlobListDetails;
+import com.azure.storage.blob.models.BlobRange;
+import com.azure.storage.blob.models.BlobStorageException;
+import com.azure.storage.blob.models.ListBlobsOptions;
+import com.azure.storage.common.StorageSharedKeyCredential;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+public class AzBlobStorageCloudClient implements ICloudClient {
+ private static final String BUCKET_ROOT_PATH = "";
+ public static final String AZURITE_ENDPOINT = "http://127.0.0.1:15055/devstoreaccount1/";
+ private static final String AZURITE_ACCOUNT_NAME = "devstoreaccount1";
+ private static final String AZURITE_ACCOUNT_KEY =
+ "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==";
+ private final ICloudGuardian guardian;
+ private BlobContainerClient blobContainerClient;
+ private AzBlobStorageClientConfig config;
+ private IRequestProfilerLimiter profiler;
+ private final BlobBatchClient blobBatchClient;
+ private static final Logger LOGGER = LogManager.getLogger();
+
+ public AzBlobStorageCloudClient(AzBlobStorageClientConfig config, ICloudGuardian guardian) {
+ this(config, buildClient(config), guardian);
+ }
+
+ public AzBlobStorageCloudClient(AzBlobStorageClientConfig config, BlobContainerClient blobContainerClient,
+ ICloudGuardian guardian) {
+ this.blobContainerClient = blobContainerClient;
+ this.config = config;
+ this.guardian = guardian;
+ long profilerInterval = config.getProfilerLogInterval();
+ AzureRequestRateLimiter limiter = new AzureRequestRateLimiter(config);
+ if (profilerInterval > 0) {
+ profiler = new CountRequestProfilerLimiter(profilerInterval, limiter);
+ } else {
+ profiler = new RequestLimiterNoOpProfiler(limiter);
+ }
+ guardian.setCloudClient(this);
+ blobBatchClient = new BlobBatchClientBuilder(blobContainerClient.getServiceClient()).buildClient();
+ }
+
+ @Override
+ public int getWriteBufferSize() {
+ return config.getWriteBufferSize();
+ }
+
+ @Override
+ public IRequestProfilerLimiter getProfilerLimiter() {
+ return profiler;
+ }
+
+ @Override
+ public ICloudWriter createWriter(String bucket, String path, IWriteBufferProvider bufferProvider) {
+ ICloudBufferedWriter bufferedWriter = new AzBlobStorageBufferedWriter(blobContainerClient, profiler, guardian,
+ bucket, config.getPrefix() + path);
+ return new CloudResettableInputStream(bufferedWriter, bufferProvider);
+ }
+
+ @Override
+ public Set<CloudFile> listObjects(String bucket, String path, FilenameFilter filter) {
+ guardian.checkReadAccess(bucket, path);
+ profiler.objectsList();
+ PagedIterable<BlobItem> blobItems = getBlobItems(bucket, config.getPrefix() + path);
+ Stream<CloudFile> cloudFileStream = mapBlobItemsToStreamOfCloudFiles(blobItems);
+ return filterCloudFiles(filter, cloudFileStream);
+ }
+
+ private Set<CloudFile> filterCloudFiles(FilenameFilter filter, Stream<CloudFile> cloudFileStream) {
+ if (filter == null) {
+ return cloudFileStream.map(this::removeCloudPrefixFromBlobName).collect(Collectors.toSet());
+ }
+ return cloudFileStream.filter(cloudFile -> filter.accept(null, cloudFile.getPath()))
+ .map(this::removeCloudPrefixFromBlobName).collect(Collectors.toSet());
+ }
+
+ private CloudFile removeCloudPrefixFromBlobName(CloudFile cloudFile) {
+ String fullyQualifiedBlobName = cloudFile.getPath();
+ fullyQualifiedBlobName = fullyQualifiedBlobName.substring(config.getPrefix().length());
+ return CloudFile.of(fullyQualifiedBlobName, cloudFile.getSize());
+ }
+
+ private Stream<CloudFile> mapBlobItemsToStreamOfCloudFiles(PagedIterable<BlobItem> blobItems) {
+ return blobItems.stream()
+ .map(blobItem -> CloudFile.of(blobItem.getName(), blobItem.getProperties().getContentLength()));
+ }
+
+ private PagedIterable<BlobItem> getBlobItems(String bucket, String path) {
+ ListBlobsOptions options =
+ new ListBlobsOptions().setPrefix(path).setDetails(new BlobListDetails().setRetrieveMetadata(true));
+ return blobContainerClient.listBlobs(options, null);
+ }
+
+ @Override
+ public int read(String bucket, String path, long offset, ByteBuffer buffer) throws HyracksDataException {
+ guardian.checkReadAccess(bucket, path);
+ profiler.objectGet();
+ BlobClient blobClient = blobContainerClient.getBlobClient(config.getPrefix() + path);
+ ByteArrayOutputStream blobStream = new ByteArrayOutputStream(buffer.capacity());
+ long rem = buffer.remaining();
+ BlobRange blobRange = new BlobRange(offset, rem);
+ downloadBlob(blobClient, blobStream, blobRange);
+ readBlobStreamIntoBuffer(buffer, blobStream);
+ if (buffer.remaining() != 0)
+ throw new IllegalStateException("Expected buffer remaining = 0, found: " + buffer.remaining());
+ return ((int) rem - buffer.remaining());
+ }
+
+ private void readBlobStreamIntoBuffer(ByteBuffer buffer, ByteArrayOutputStream byteArrayOutputStream)
+ throws HyracksDataException {
+ byte[] byteArray = byteArrayOutputStream.toByteArray();
+ try {
+ buffer.put(byteArray);
+ byteArrayOutputStream.close();
+ } catch (BufferOverflowException | ReadOnlyBufferException | IOException ex) {
+ throw HyracksDataException.create(ex);
+ }
+ }
+
+ private void downloadBlob(BlobClient blobClient, ByteArrayOutputStream byteArrayOutputStream, BlobRange blobRange)
+ throws HyracksDataException {
+ try {
+ blobClient.downloadStreamWithResponse(byteArrayOutputStream, blobRange, null, null, false, null, null);
+ } catch (BlobStorageException ex) {
+ throw HyracksDataException.create(ex);
+ }
+ }
+
+ @Override
+ public byte[] readAllBytes(String bucket, String path) throws HyracksDataException {
+ guardian.checkReadAccess(bucket, path);
+ profiler.objectGet();
+ BlobClient blobClient = blobContainerClient.getBlobClient(config.getPrefix() + path);
+ try {
+ BinaryData binaryData = blobClient.downloadContent();
+ return binaryData.toBytes();
+ } catch (BlobStorageException ex) {
+ BlobErrorCode errorCode = ex.getErrorCode();
+ if (errorCode.equals(BlobErrorCode.BLOB_NOT_FOUND)) {
+ LOGGER.warn("Blob not found on cloud: {}", path);
+ return null;
+ }
+ throw HyracksDataException.create(ex);
+ }
+ }
+
+ @Override
+ public InputStream getObjectStream(String bucket, String path, long offset, long length) {
+ guardian.checkReadAccess(bucket, path);
+ profiler.objectGet();
+ BlobRange blobRange = new BlobRange(offset, length);
+ BlobClient blobClient = blobContainerClient.getBlobClient(config.getPrefix() + path);
+ try {
+ return blobClient.openInputStream(blobRange, null);
+ } catch (BlobStorageException ex) {
+ LOGGER.error("error getting object stream for path: {}. Exception: {}", path, ex.getMessage());
+ throw new IllegalStateException(ex);
+ }
+ }
+
+ @Override
+ public void write(String bucket, String path, byte[] data) {
+ guardian.checkWriteAccess(bucket, path);
+ profiler.objectWrite();
+ BinaryData binaryData = BinaryData.fromBytes(data);
+ BlobClient blobClient = blobContainerClient.getBlobClient(config.getPrefix() + path);
+ blobClient.upload(binaryData, true);
+ }
+
+ @Override
+ public void copy(String bucket, String srcPath, FileReference destPath) {
+ guardian.checkReadAccess(bucket, srcPath);
+ profiler.objectGet();
+ BlobClient srcBlobClient = blobContainerClient.getBlobClient(config.getPrefix() + srcPath);
+ String srcBlobUrl = srcBlobClient.getBlobUrl();
+ profiler.objectCopy();
+ guardian.checkWriteAccess(bucket, destPath.getRelativePath());
+ BlobClient destBlobClient = blobContainerClient.getBlobClient(destPath.getFile().getPath());
+ destBlobClient.beginCopy(srcBlobUrl, null);
+ }
+
+ @Override
+ public void deleteObjects(String bucket, Collection<String> paths) {
+ if (paths.isEmpty())
+ return;
+ Set<BlobItem> blobsToDelete = getBlobsMatchingThesePaths(paths);
+ List<String> blobURLs = getBlobURLs(blobsToDelete);
+ if (blobURLs.isEmpty())
+ return;
+ Collection<List<String>> batchedBlobURLs = getBatchedBlobURLs(blobURLs);
+ for (List<String> batch : batchedBlobURLs) {
+ blobBatchClient.deleteBlobs(batch, null).stream().count();
+ }
+ }
+
+ private Collection<List<String>> getBatchedBlobURLs(List<String> blobURLs) {
+ int startIdx = 0;
+ Collection<List<String>> batchedBLOBURLs = new ArrayList<>();
+ Iterator<String> iterator = blobURLs.iterator();
+ while (iterator.hasNext()) {
+ List<String> batch = new ArrayList<>();
+ while (startIdx < DELETE_BATCH_SIZE && iterator.hasNext()) {
+ batch.add(iterator.next());
+ startIdx++;
+ }
+ batchedBLOBURLs.add(batch);
+ startIdx = 0;
+ }
+ return batchedBLOBURLs;
+ }
+
+ private Set<BlobItem> getBlobsMatchingThesePaths(Collection<String> paths) {
+ List<String> pathWithPrefix =
+ paths.stream().map(path -> config.getPrefix() + path).collect(Collectors.toList());
+ PagedIterable<BlobItem> blobItems = blobContainerClient.listBlobs();
+ return blobItems.stream().filter(blobItem -> pathWithPrefix.contains(blobItem.getName()))
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public long getObjectSize(String bucket, String path) throws HyracksDataException {
+ guardian.checkReadAccess(bucket, path);
+ profiler.objectGet();
+ try {
+ BlobClient blobClient = blobContainerClient.getBlobClient(config.getPrefix() + path);
+ return blobClient.getProperties().getBlobSize();
+ } catch (BlobStorageException ex) {
+ BlobErrorCode errorCode = ex.getErrorCode();
+ if (errorCode.equals(BlobErrorCode.BLOB_NOT_FOUND)) {
+ LOGGER.error("error while getting blob size; no such blob found: {} ", config.getPrefix() + path);
+ return 0;
+ }
+ throw HyracksDataException.create(ex);
+ } catch (Exception ex) {
+ LOGGER.error("error getting size of the blob: {}. Exception: {}", path, ex.getMessage());
+ throw HyracksDataException.create(ex);
+ }
+ }
+
+ @Override
+ public boolean exists(String bucket, String path) throws HyracksDataException {
+ guardian.checkReadAccess(bucket, path);
+ profiler.objectGet();
+ try {
+ BlobClient blobClient = blobContainerClient.getBlobClient(config.getPrefix() + path);
+ return blobClient.exists();
+ } catch (BlobStorageException ex) {
+ BlobErrorCode errorCode = ex.getErrorCode();
+ if (errorCode.equals(BlobErrorCode.BLOB_NOT_FOUND)) {
+ return false;
+ }
+ throw HyracksDataException.create(ex);
+ } catch (Exception ex) {
+ throw HyracksDataException.create(ex);
+ }
+ }
+
+ @Override
+ public boolean isEmptyPrefix(String bucket, String path) throws HyracksDataException {
+ profiler.objectsList();
+ ListBlobsOptions listBlobsOptions = new ListBlobsOptions().setPrefix(config.getPrefix() + path);
+ //MAX_VALUE below represents practically no timeout
+ PagedIterable<BlobItem> blobItems =
+ blobContainerClient.listBlobs(listBlobsOptions, Duration.ofDays(Long.MAX_VALUE));
+ return blobItems.stream().findAny().isEmpty();
+ }
+
+ @Override
+ public IParallelDownloader createParallelDownloader(String bucket, IOManager ioManager) {
+ return new AzureParallelDownloader(ioManager, blobContainerClient, profiler, config);
+ }
+
+ @Override
+ public JsonNode listAsJson(ObjectMapper objectMapper, String bucket) {
+ profiler.objectsList();
+ PagedIterable<BlobItem> blobItems = getBlobItems(bucket, BUCKET_ROOT_PATH);
+ List<BlobItem> blobs = blobItems.stream().distinct().collect(Collectors.toList());
+ blobs = sortBlobItemsByName(blobs);
+ return mapBlobItemsToJson(blobs, objectMapper);
+ }
+
+ private List<BlobItem> sortBlobItemsByName(List<BlobItem> blobs) {
+ return blobs.stream()
+ .sorted((blob1, blob2) -> String.CASE_INSENSITIVE_ORDER.compare(blob1.getName(), blob2.getName()))
+ .collect(Collectors.toList());
+ }
+
+ private ArrayNode mapBlobItemsToJson(List<BlobItem> blobs, ObjectMapper objectMapper) {
+ ArrayNode objectsInfo = objectMapper.createArrayNode();
+ for (BlobItem blob : blobs) {
+ ObjectNode objectInfo = objectsInfo.addObject();
+ objectInfo.put("path", blob.getName());
+ objectInfo.put("size", blob.getProperties().getContentLength());
+ }
+ return objectsInfo;
+ }
+
+ @Override
+ public void close() {
+ // Closing Azure Blob Clients is not required as the underlying netty connection pool
+ // handles the same for the apps.
+ // Ref: https://github.com/Azure/azure-sdk-for-java/issues/17903
+ // Hence this implementation is a no op.
+ }
+
+ private static BlobContainerClient buildClient(AzBlobStorageClientConfig config) {
+ BlobContainerClientBuilder blobContainerClientBuilder =
+ new BlobContainerClientBuilder().containerName(config.getBucket()).endpoint(getEndpoint(config));
+ configCredentialsToAzClient(blobContainerClientBuilder, config);
+ BlobContainerClient blobContainerClient = blobContainerClientBuilder.buildClient();
+ blobContainerClient.createIfNotExists();
+ return blobContainerClient;
+ }
+
+ private static void configCredentialsToAzClient(BlobContainerClientBuilder builder,
+ AzBlobStorageClientConfig config) {
+ if (config.isAnonymousAuth()) {
+ StorageSharedKeyCredential creds =
+ new StorageSharedKeyCredential(AZURITE_ACCOUNT_NAME, AZURITE_ACCOUNT_KEY);
+ builder.credential(creds);
+ } else {
+ builder.credential(config.createCredentialsProvider());
+ }
+ }
+
+ private static String getEndpoint(AzBlobStorageClientConfig config) {
+ return config.isAnonymousAuth() ? AZURITE_ENDPOINT + config.getBucket()
+ : config.getEndpoint() + "/" + config.getBucket();
+ }
+
+ private List<String> getBlobURLs(Set<BlobItem> blobs) {
+ final String blobURLPrefix = blobContainerClient.getBlobContainerUrl() + "/";
+ return blobs.stream().map(BlobItem::getName).map(blobName -> blobURLPrefix + blobName)
+ .collect(Collectors.toList());
+ }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzureParallelDownloader.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzureParallelDownloader.java
new file mode 100644
index 0000000..4980587
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzureParallelDownloader.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.cloud.clients.azure.blobstorage;
+
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.InvalidPathException;
+import java.nio.file.Path;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.asterix.cloud.clients.IParallelDownloader;
+import org.apache.asterix.cloud.clients.profiler.IRequestProfilerLimiter;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.io.FileReference;
+import org.apache.hyracks.control.nc.io.IOManager;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import com.azure.core.http.rest.PagedIterable;
+import com.azure.storage.blob.BlobClient;
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.models.BlobItem;
+import com.azure.storage.blob.models.ListBlobsOptions;
+
+public class AzureParallelDownloader implements IParallelDownloader {
+ public static final String STORAGE_SUB_DIR = "storage";
+ private final IOManager ioManager;
+ private final BlobContainerClient blobContainerClient;
+ private final IRequestProfilerLimiter profiler;
+ private final AzBlobStorageClientConfig config;
+ private static final Logger LOGGER = LogManager.getLogger();
+
+ public AzureParallelDownloader(IOManager ioManager, BlobContainerClient blobContainerClient,
+ IRequestProfilerLimiter profiler, AzBlobStorageClientConfig config) {
+ this.ioManager = ioManager;
+ this.blobContainerClient = blobContainerClient;
+ this.profiler = profiler;
+ this.config = config;
+ }
+
+ @Override
+ public void downloadFiles(Collection<FileReference> toDownload) throws HyracksDataException {
+ for (FileReference fileReference : toDownload) {
+ BlobClient blobClient =
+ blobContainerClient.getBlobClient(config.getPrefix() + fileReference.getRelativePath());
+ Path absPath = Path.of(fileReference.getAbsolutePath());
+ Path parentPath = absPath.getParent();
+ OutputStream fileOutputStream = null;
+ try {
+ createDirectories(parentPath);
+ fileOutputStream = Files.newOutputStream(absPath);
+ blobClient.downloadStream(fileOutputStream);
+ fileOutputStream.close();
+ } catch (IOException e) {
+ throw HyracksDataException.create(e);
+ } finally {
+ closeOutputStream(fileOutputStream);
+ }
+ }
+ }
+
+ private static void closeOutputStream(OutputStream fileOutputStream) throws HyracksDataException {
+ if (fileOutputStream != null) {
+ try {
+ fileOutputStream.close();
+ } catch (IOException e) {
+ throw HyracksDataException.create(e);
+ }
+ }
+ }
+
+ @Override
+ public Collection<FileReference> downloadDirectories(Collection<FileReference> directories)
+ throws HyracksDataException {
+ Set<FileReference> failedFiles = new HashSet<>();
+ for (FileReference directory : directories) {
+ PagedIterable<BlobItem> blobsInDir = getBlobItems(directory);
+ for (BlobItem blobItem : blobsInDir) {
+ profiler.objectGet();
+ download(blobItem, failedFiles);
+ }
+ }
+ return failedFiles;
+ }
+
+ private void download(BlobItem blobItem, Set<FileReference> failedFiles) throws HyracksDataException {
+ BlobClient blobClient = blobContainerClient.getBlobClient(blobItem.getName());
+ FileReference diskDestFile = ioManager.resolve(createDiskSubPath(blobItem.getName()));
+ Path absDiskBlobPath = getDiskDestPath(diskDestFile);
+ Path parentDiskPath = absDiskBlobPath.getParent();
+ createDirectories(parentDiskPath);
+ FileOutputStream outputStreamToDest = getOutputStreamToDest(diskDestFile);
+ try {
+ blobClient.downloadStream(outputStreamToDest);
+ } catch (Exception e) {
+ FileReference failedFile = ioManager.resolve(blobItem.getName());
+ failedFiles.add(failedFile);
+ }
+ }
+
+ private String createDiskSubPath(String blobName) {
+ if (!blobName.startsWith(STORAGE_SUB_DIR)) {
+ blobName = blobName.substring(blobName.indexOf(STORAGE_SUB_DIR));
+ }
+ return blobName;
+ }
+
+ private FileOutputStream getOutputStreamToDest(FileReference destFile) throws HyracksDataException {
+ try {
+ return new FileOutputStream(destFile.getAbsolutePath());
+ } catch (FileNotFoundException ex) {
+ throw HyracksDataException.create(ex);
+ }
+ }
+
+ private void createDirectories(Path parentPath) throws HyracksDataException {
+ if (Files.notExists(parentPath))
+ try {
+ Files.createDirectories(parentPath);
+ } catch (IOException ex) {
+ throw HyracksDataException.create(ex);
+ }
+ }
+
+ private Path getDiskDestPath(FileReference destFile) throws HyracksDataException {
+ try {
+ return Path.of(destFile.getAbsolutePath());
+ } catch (InvalidPathException ex) {
+ throw HyracksDataException.create(ex);
+ }
+ }
+
+ private PagedIterable<BlobItem> getBlobItems(FileReference directoryToDownload) {
+ ListBlobsOptions listBlobsOptions =
+ new ListBlobsOptions().setPrefix(config.getPrefix() + directoryToDownload.getRelativePath());
+ return blobContainerClient.listBlobs(listBlobsOptions, null);
+ }
+
+ @Override
+ public void close() {
+ // Closing Azure Blob Clients is not required as the underlying netty connection pool
+ // handles the same for the apps.
+ // Ref: https://github.com/Azure/azure-sdk-for-java/issues/17903
+ // Hence this implementation is a no op.
+ }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzureRequestRateLimiter.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzureRequestRateLimiter.java
new file mode 100644
index 0000000..6a76952
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/azure/blobstorage/AzureRequestRateLimiter.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.cloud.clients.azure.blobstorage;
+
+import org.apache.asterix.cloud.clients.profiler.limiter.IRateLimiter;
+import org.apache.asterix.cloud.clients.profiler.limiter.IRequestRateLimiter;
+import org.apache.asterix.cloud.clients.profiler.limiter.NoOpRateLimiter;
+import org.apache.asterix.cloud.clients.profiler.limiter.TokenBasedRateLimiter;
+
+public final class AzureRequestRateLimiter implements IRequestRateLimiter {
+ private final IRateLimiter writeLimiter;
+ private final IRateLimiter readLimiter;
+
+ public AzureRequestRateLimiter(AzBlobStorageClientConfig config) {
+ long tokenAcquireTimeout = config.getTokenAcquireTimeout();
+ this.writeLimiter = createLimiter(config.getWriteMaxRequestsPerSeconds(), tokenAcquireTimeout);
+ this.readLimiter = createLimiter(config.getReadMaxRequestsPerSeconds(), tokenAcquireTimeout);
+ }
+
+ @Override
+ public void writeRequest() {
+ writeLimiter.acquire();
+ }
+
+ @Override
+ public void readRequest() {
+ readLimiter.acquire();
+ }
+
+ @Override
+ public void listRequest() {
+ readLimiter.acquire();
+ }
+
+ private static IRateLimiter createLimiter(int maxRequestsPerSecond, long tokeAcquireTimeout) {
+ if (maxRequestsPerSecond > 0) {
+ return new TokenBasedRateLimiter(maxRequestsPerSecond, tokeAcquireTimeout);
+ }
+ return NoOpRateLimiter.INSTANCE;
+ }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSClientConfig.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSClientConfig.java
index 4edb7a7..e4e471d 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSClientConfig.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSClientConfig.java
@@ -19,52 +19,68 @@
package org.apache.asterix.cloud.clients.google.gcs;
import static org.apache.asterix.external.util.google.gcs.GCSConstants.ENDPOINT_FIELD_NAME;
+import static org.apache.asterix.external.util.google.gcs.GCSConstants.STORAGE_PREFIX;
import java.io.IOException;
import java.util.Map;
import org.apache.asterix.common.config.CloudProperties;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.util.StorageUtil;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.OAuth2Credentials;
import com.google.cloud.NoCredentials;
public class GCSClientConfig {
- public static final int WRITE_BUFFER_SIZE = StorageUtil.getIntSizeInBytes(1, StorageUtil.StorageUnit.MEGABYTE);
+
// The maximum number of files that can be deleted (GCS restriction): https://cloud.google.com/storage/quotas#json-requests
static final int DELETE_BATCH_SIZE = 100;
private final String region;
private final String endpoint;
- private final String prefix;
private final boolean anonymousAuth;
private final long profilerLogInterval;
+ private final long tokenAcquireTimeout;
+ private final int readMaxRequestsPerSeconds;
+ private final int writeMaxRequestsPerSeconds;
+ private final int writeBufferSize;
+ private final String prefix;
- public GCSClientConfig(String region, String endpoint, String prefix, boolean anonymousAuth,
- long profilerLogInterval) {
+ private GCSClientConfig(String region, String endpoint, boolean anonymousAuth, long profilerLogInterval,
+ long tokenAcquireTimeout, int writeMaxRequestsPerSeconds, int readMaxRequestsPerSeconds,
+ int writeBufferSize, String prefix) {
this.region = region;
this.endpoint = endpoint;
- this.prefix = prefix;
this.anonymousAuth = anonymousAuth;
this.profilerLogInterval = profilerLogInterval;
+ this.tokenAcquireTimeout = tokenAcquireTimeout;
+ this.writeMaxRequestsPerSeconds = writeMaxRequestsPerSeconds;
+ this.readMaxRequestsPerSeconds = readMaxRequestsPerSeconds;
+ this.writeBufferSize = writeBufferSize;
+ this.prefix = prefix;
+ }
+
+ public GCSClientConfig(String region, String endpoint, boolean anonymousAuth, long profilerLogInterval,
+ int writeBufferSize, String prefix) {
+ this(region, endpoint, anonymousAuth, profilerLogInterval, 1, 0, 0, writeBufferSize, prefix);
}
public static GCSClientConfig of(CloudProperties cloudProperties) {
return new GCSClientConfig(cloudProperties.getStorageRegion(), cloudProperties.getStorageEndpoint(),
- cloudProperties.getStoragePrefix(), cloudProperties.isStorageAnonymousAuth(),
- cloudProperties.getProfilerLogInterval());
+ cloudProperties.isStorageAnonymousAuth(), cloudProperties.getProfilerLogInterval(),
+ cloudProperties.getTokenAcquireTimeout(), cloudProperties.getWriteMaxRequestsPerSecond(),
+ cloudProperties.getReadMaxRequestsPerSecond(), cloudProperties.getWriteBufferSize(),
+ cloudProperties.getStoragePrefix());
}
- public static GCSClientConfig of(Map<String, String> configuration) {
+ public static GCSClientConfig of(Map<String, String> configuration, int writeBufferSize) {
String endPoint = configuration.getOrDefault(ENDPOINT_FIELD_NAME, "");
long profilerLogInterval = 0;
String region = "";
- String prefix = "";
+ String prefix = configuration.getOrDefault(STORAGE_PREFIX, "");
boolean anonymousAuth = false;
- return new GCSClientConfig(region, endPoint, prefix, anonymousAuth, profilerLogInterval);
+ return new GCSClientConfig(region, endPoint, anonymousAuth, profilerLogInterval, writeBufferSize, prefix);
}
public String getRegion() {
@@ -75,10 +91,6 @@
return endpoint;
}
- public String getPrefix() {
- return prefix;
- }
-
public long getProfilerLogInterval() {
return profilerLogInterval;
}
@@ -94,4 +106,24 @@
throw HyracksDataException.create(e);
}
}
+
+ public long getTokenAcquireTimeout() {
+ return tokenAcquireTimeout;
+ }
+
+ public int getWriteMaxRequestsPerSeconds() {
+ return writeMaxRequestsPerSeconds;
+ }
+
+ public int getReadMaxRequestsPerSeconds() {
+ return readMaxRequestsPerSeconds;
+ }
+
+ public int getWriteBufferSize() {
+ return writeBufferSize;
+ }
+
+ public String getPrefix() {
+ return prefix;
+ }
}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSCloudClient.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSCloudClient.java
index de242bd..62ca4ec 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSCloudClient.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSCloudClient.java
@@ -40,10 +40,10 @@
import org.apache.asterix.cloud.clients.IParallelDownloader;
import org.apache.asterix.cloud.clients.profiler.CountRequestProfilerLimiter;
import org.apache.asterix.cloud.clients.profiler.IRequestProfilerLimiter;
-import org.apache.asterix.cloud.clients.profiler.NoOpRequestProfilerLimiter;
-import org.apache.asterix.cloud.clients.profiler.limiter.NoOpRequestLimiter;
+import org.apache.asterix.cloud.clients.profiler.RequestLimiterNoOpProfiler;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
+import org.apache.hyracks.api.util.CleanupUtils;
import org.apache.hyracks.api.util.IoUtil;
import org.apache.hyracks.control.nc.io.IOManager;
@@ -68,16 +68,19 @@
private final GCSClientConfig config;
private final ICloudGuardian guardian;
private final IRequestProfilerLimiter profilerLimiter;
+ private final int writeBufferSize;
public GCSCloudClient(GCSClientConfig config, Storage gcsClient, ICloudGuardian guardian) {
this.gcsClient = gcsClient;
this.config = config;
this.guardian = guardian;
+ this.writeBufferSize = config.getWriteBufferSize();
long profilerInterval = config.getProfilerLogInterval();
+ GCSRequestRateLimiter limiter = new GCSRequestRateLimiter(config);
if (profilerInterval > 0) {
- profilerLimiter = new CountRequestProfilerLimiter(profilerInterval, NoOpRequestLimiter.INSTANCE);
+ profilerLimiter = new CountRequestProfilerLimiter(profilerInterval, limiter);
} else {
- profilerLimiter = NoOpRequestProfilerLimiter.INSTANCE;
+ profilerLimiter = new RequestLimiterNoOpProfiler(limiter);
}
guardian.setCloudClient(this);
}
@@ -88,7 +91,7 @@
@Override
public int getWriteBufferSize() {
- return GCSClientConfig.WRITE_BUFFER_SIZE;
+ return writeBufferSize;
}
@Override
@@ -98,20 +101,20 @@
@Override
public ICloudWriter createWriter(String bucket, String path, IWriteBufferProvider bufferProvider) {
- return new GCSWriter(bucket, path, gcsClient, profilerLimiter);
+ return new GCSWriter(bucket, config.getPrefix() + path, gcsClient, profilerLimiter, guardian, writeBufferSize);
}
@Override
public Set<CloudFile> listObjects(String bucket, String path, FilenameFilter filter) {
guardian.checkReadAccess(bucket, path);
profilerLimiter.objectsList();
- Page<Blob> blobs =
- gcsClient.list(bucket, BlobListOption.prefix(path), BlobListOption.fields(Storage.BlobField.SIZE));
+ Page<Blob> blobs = gcsClient.list(bucket, BlobListOption.prefix(config.getPrefix() + path),
+ BlobListOption.fields(Storage.BlobField.SIZE));
Set<CloudFile> files = new HashSet<>();
for (Blob blob : blobs.iterateAll()) {
if (filter.accept(null, IoUtil.getFileNameFromPath(blob.getName()))) {
- files.add(CloudFile.of(blob.getName(), blob.getSize()));
+ files.add(CloudFile.of(stripCloudPrefix(blob.getName()), blob.getSize()));
}
}
return files;
@@ -119,8 +122,9 @@
@Override
public int read(String bucket, String path, long offset, ByteBuffer buffer) throws HyracksDataException {
+ guardian.checkReadAccess(bucket, path);
profilerLimiter.objectGet();
- BlobId blobId = BlobId.of(bucket, path);
+ BlobId blobId = BlobId.of(bucket, config.getPrefix() + path);
long readTo = offset + buffer.remaining();
int totalRead = 0;
try (ReadChannel from = gcsClient.reader(blobId).limit(readTo)) {
@@ -140,8 +144,9 @@
@Override
public byte[] readAllBytes(String bucket, String path) {
+ guardian.checkReadAccess(bucket, path);
profilerLimiter.objectGet();
- BlobId blobId = BlobId.of(bucket, path);
+ BlobId blobId = BlobId.of(bucket, config.getPrefix() + path);
try {
return gcsClient.readAllBytes(blobId);
} catch (StorageException e) {
@@ -151,12 +156,15 @@
@Override
public InputStream getObjectStream(String bucket, String path, long offset, long length) {
+ guardian.checkReadAccess(bucket, path);
profilerLimiter.objectGet();
- try (ReadChannel reader = gcsClient.reader(bucket, path).limit(offset + length)) {
+ ReadChannel reader = null;
+ try {
+ reader = gcsClient.reader(bucket, config.getPrefix() + path).limit(offset + length);
reader.seek(offset);
return Channels.newInputStream(reader);
- } catch (StorageException | IOException e) {
- throw new IllegalStateException(e);
+ } catch (StorageException | IOException ex) {
+ throw new RuntimeException(CleanupUtils.close(reader, ex));
}
}
@@ -164,14 +172,15 @@
public void write(String bucket, String path, byte[] data) {
guardian.checkWriteAccess(bucket, path);
profilerLimiter.objectWrite();
- BlobInfo blobInfo = BlobInfo.newBuilder(bucket, path).build();
+ BlobInfo blobInfo = BlobInfo.newBuilder(bucket, config.getPrefix() + path).build();
gcsClient.create(blobInfo, data);
}
@Override
public void copy(String bucket, String srcPath, FileReference destPath) {
- Page<Blob> blobs = gcsClient.list(bucket, BlobListOption.prefix(srcPath));
+ guardian.checkReadAccess(bucket, srcPath);
profilerLimiter.objectsList();
+ Page<Blob> blobs = gcsClient.list(bucket, BlobListOption.prefix(config.getPrefix() + srcPath));
for (Blob blob : blobs.iterateAll()) {
profilerLimiter.objectCopy();
BlobId source = blob.getBlobId();
@@ -194,7 +203,7 @@
while (pathIter.hasNext()) {
batchRequest = gcsClient.batch();
for (int i = 0; pathIter.hasNext() && i < DELETE_BATCH_SIZE; i++) {
- BlobId blobId = BlobId.of(bucket, pathIter.next());
+ BlobId blobId = BlobId.of(bucket, config.getPrefix() + pathIter.next());
guardian.checkWriteAccess(bucket, blobId.getName());
batchRequest.delete(blobId);
}
@@ -208,7 +217,8 @@
public long getObjectSize(String bucket, String path) {
guardian.checkReadAccess(bucket, path);
profilerLimiter.objectGet();
- Blob blob = gcsClient.get(bucket, path, Storage.BlobGetOption.fields(Storage.BlobField.SIZE));
+ Blob blob =
+ gcsClient.get(bucket, config.getPrefix() + path, Storage.BlobGetOption.fields(Storage.BlobField.SIZE));
if (blob == null) {
return 0;
}
@@ -219,7 +229,8 @@
public boolean exists(String bucket, String path) {
guardian.checkReadAccess(bucket, path);
profilerLimiter.objectGet();
- Blob blob = gcsClient.get(bucket, path, Storage.BlobGetOption.fields(Storage.BlobField.values()));
+ Blob blob = gcsClient.get(bucket, config.getPrefix() + path,
+ Storage.BlobGetOption.fields(Storage.BlobField.values()));
return blob != null && blob.exists();
}
@@ -227,7 +238,7 @@
public boolean isEmptyPrefix(String bucket, String path) {
guardian.checkReadAccess(bucket, path);
profilerLimiter.objectsList();
- Page<Blob> blobs = gcsClient.list(bucket, BlobListOption.prefix(path));
+ Page<Blob> blobs = gcsClient.list(bucket, BlobListOption.prefix(config.getPrefix() + path));
return !blobs.hasNextPage();
}
@@ -272,4 +283,8 @@
}
return builder.build().getService();
}
-}
+
+ private String stripCloudPrefix(String objectName) {
+ return objectName.substring(config.getPrefix().length());
+ }
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSParallelDownloader.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSParallelDownloader.java
index 0994cea..0d30120 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSParallelDownloader.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSParallelDownloader.java
@@ -56,6 +56,7 @@
private final Storage gcsClient;
private final TransferManager transferManager;
private final IRequestProfilerLimiter profiler;
+ private final GCSClientConfig config;
public GCSParallelDownloader(String bucket, IOManager ioManager, GCSClientConfig config,
IRequestProfilerLimiter profiler) throws HyracksDataException {
@@ -70,18 +71,21 @@
this.gcsClient = builder.build().getService();
this.transferManager =
TransferManagerConfig.newBuilder().setStorageOptions(builder.build()).build().getService();
+ this.config = config;
}
@Override
public void downloadFiles(Collection<FileReference> toDownload) throws HyracksDataException {
- ParallelDownloadConfig.Builder config = ParallelDownloadConfig.newBuilder().setBucketName(bucket);
+ ParallelDownloadConfig.Builder downConfig =
+ ParallelDownloadConfig.newBuilder().setBucketName(bucket).setStripPrefix(this.config.getPrefix());
+
Map<Path, List<BlobInfo>> pathListMap = new HashMap<>();
try {
for (FileReference fileReference : toDownload) {
profiler.objectGet();
FileUtils.createParentDirectories(fileReference.getFile());
- addToMap(pathListMap, fileReference.getDeviceHandle().getMount().toPath(),
- BlobInfo.newBuilder(BlobId.of(bucket, fileReference.getRelativePath())).build());
+ addToMap(pathListMap, fileReference.getDeviceHandle().getMount().toPath(), BlobInfo
+ .newBuilder(BlobId.of(bucket, config.getPrefix() + fileReference.getRelativePath())).build());
}
} catch (IOException e) {
throw HyracksDataException.create(e);
@@ -89,7 +93,7 @@
List<DownloadJob> downloadJobs = new ArrayList<>(pathListMap.size());
for (Map.Entry<Path, List<BlobInfo>> entry : pathListMap.entrySet()) {
downloadJobs.add(transferManager.downloadBlobs(entry.getValue(),
- config.setDownloadDirectory(entry.getKey()).build()));
+ downConfig.setDownloadDirectory(entry.getKey()).build()));
}
downloadJobs.forEach(DownloadJob::getDownloadResults);
}
@@ -98,20 +102,22 @@
public Collection<FileReference> downloadDirectories(Collection<FileReference> toDownload)
throws HyracksDataException {
Set<FileReference> failedFiles = new HashSet<>();
- ParallelDownloadConfig.Builder config = ParallelDownloadConfig.newBuilder().setBucketName(bucket);
+ ParallelDownloadConfig.Builder config =
+ ParallelDownloadConfig.newBuilder().setBucketName(bucket).setStripPrefix(this.config.getPrefix());
Map<Path, List<BlobInfo>> pathListMap = new HashMap<>();
for (FileReference fileReference : toDownload) {
profiler.objectMultipartDownload();
- Page<Blob> blobs = gcsClient.list(bucket, Storage.BlobListOption.prefix(fileReference.getRelativePath()));
+ Page<Blob> blobs = gcsClient.list(bucket,
+ Storage.BlobListOption.prefix(this.config.getPrefix() + fileReference.getRelativePath()));
for (Blob blob : blobs.iterateAll()) {
addToMap(pathListMap, fileReference.getDeviceHandle().getMount().toPath(), blob.asBlobInfo());
}
}
List<DownloadJob> downloadJobs = new ArrayList<>(pathListMap.size());
for (Map.Entry<Path, List<BlobInfo>> entry : pathListMap.entrySet()) {
- downloadJobs.add(transferManager.downloadBlobs(entry.getValue(),
- config.setDownloadDirectory(entry.getKey()).build()));
+ ParallelDownloadConfig parallelDownloadConfig = config.setDownloadDirectory(entry.getKey()).build();
+ downloadJobs.add(transferManager.downloadBlobs(entry.getValue(), parallelDownloadConfig));
}
List<DownloadResult> results;
for (DownloadJob job : downloadJobs) {
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSRequestRateLimiter.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSRequestRateLimiter.java
new file mode 100644
index 0000000..71f6b8c
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSRequestRateLimiter.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.cloud.clients.google.gcs;
+
+import org.apache.asterix.cloud.clients.profiler.limiter.IRateLimiter;
+import org.apache.asterix.cloud.clients.profiler.limiter.IRequestRateLimiter;
+import org.apache.asterix.cloud.clients.profiler.limiter.NoOpRateLimiter;
+import org.apache.asterix.cloud.clients.profiler.limiter.TokenBasedRateLimiter;
+
+public class GCSRequestRateLimiter implements IRequestRateLimiter {
+ private final IRateLimiter writeLimiter;
+ private final IRateLimiter readLimiter;
+
+ public GCSRequestRateLimiter(GCSClientConfig config) {
+ long tokenAcquireTimeout = config.getTokenAcquireTimeout();
+ this.writeLimiter = createLimiter(config.getWriteMaxRequestsPerSeconds(), tokenAcquireTimeout);
+ this.readLimiter = createLimiter(config.getReadMaxRequestsPerSeconds(), tokenAcquireTimeout);
+ }
+
+ @Override
+ public void writeRequest() {
+ writeLimiter.acquire();
+ }
+
+ @Override
+ public void readRequest() {
+ readLimiter.acquire();
+ }
+
+ @Override
+ public void listRequest() {
+ readLimiter.acquire();
+ }
+
+ private static IRateLimiter createLimiter(int maxRequestsPerSecond, long tokenAcquireTimeout) {
+ if (maxRequestsPerSecond > 0) {
+ return new TokenBasedRateLimiter(maxRequestsPerSecond, tokenAcquireTimeout);
+ }
+ return NoOpRateLimiter.INSTANCE;
+ }
+}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSWriter.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSWriter.java
index 41d1a71..8d68f01 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSWriter.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/clients/google/gcs/GCSWriter.java
@@ -18,11 +18,10 @@
*/
package org.apache.asterix.cloud.clients.google.gcs;
-import static org.apache.asterix.cloud.clients.google.gcs.GCSClientConfig.WRITE_BUFFER_SIZE;
-
import java.io.IOException;
import java.nio.ByteBuffer;
+import org.apache.asterix.cloud.clients.ICloudGuardian;
import org.apache.asterix.cloud.clients.ICloudWriter;
import org.apache.asterix.cloud.clients.profiler.IRequestProfilerLimiter;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -40,14 +39,20 @@
private final String path;
private final IRequestProfilerLimiter profiler;
private final Storage gcsClient;
+ private final ICloudGuardian guardian;
+ private final int writeBufferSize;
+
private WriteChannel writer = null;
private long writtenBytes;
- public GCSWriter(String bucket, String path, Storage gcsClient, IRequestProfilerLimiter profiler) {
+ public GCSWriter(String bucket, String path, Storage gcsClient, IRequestProfilerLimiter profiler,
+ ICloudGuardian guardian, int writeBufferSize) {
this.bucket = bucket;
this.path = path;
this.profiler = profiler;
this.gcsClient = gcsClient;
+ this.guardian = guardian;
+ this.writeBufferSize = writeBufferSize;
writtenBytes = 0;
}
@@ -58,6 +63,7 @@
@Override
public int write(ByteBuffer page) throws HyracksDataException {
+ guardian.checkIsolatedWriteAccess(bucket, path);
profiler.objectMultipartUpload();
setUploadId();
int written = 0;
@@ -93,6 +99,7 @@
@Override
public void finish() throws HyracksDataException {
+ guardian.checkWriteAccess(bucket, path);
setUploadId();
profiler.objectMultipartUpload();
try {
@@ -115,7 +122,7 @@
private void setUploadId() {
if (writer == null) {
writer = gcsClient.writer(BlobInfo.newBuilder(BlobId.of(bucket, path)).build());
- writer.setChunkSize(WRITE_BUFFER_SIZE);
+ writer.setChunkSize(writeBufferSize);
writtenBytes = 0;
log("STARTED");
}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/lazy/filesystem/HolePuncherProvider.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/lazy/filesystem/HolePuncherProvider.java
index 91de5ae..e50559f 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/lazy/filesystem/HolePuncherProvider.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/lazy/filesystem/HolePuncherProvider.java
@@ -52,7 +52,7 @@
// Running a debug hole puncher on a non-Linux box
String osName = FileSystemOperationDispatcherUtil.getOSName();
- LOGGER.warn("Using 'DebugHolePuncher' as the OS '{}' does not support punishing holes", osName);
+ LOGGER.warn("Using 'DebugHolePuncher' as the OS '{}' does not support punching holes", osName);
return new DebugHolePuncher(cloudIOManager, bufferProvider);
}
diff --git a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/writer/GCSExternalFileWriterFactory.java b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/writer/GCSExternalFileWriterFactory.java
index 9e9c003..886f20d 100644
--- a/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/writer/GCSExternalFileWriterFactory.java
+++ b/asterixdb/asterix-cloud/src/main/java/org/apache/asterix/cloud/writer/GCSExternalFileWriterFactory.java
@@ -62,7 +62,7 @@
@Override
ICloudClient createCloudClient() throws CompilationException {
- GCSClientConfig config = GCSClientConfig.of(configuration);
+ GCSClientConfig config = GCSClientConfig.of(configuration, writeBufferSize);
return new GCSCloudClient(config, GCSUtils.buildClient(configuration),
ICloudGuardian.NoOpCloudGuardian.INSTANCE);
}
diff --git a/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/azure/LSMAzBlobStorageTest.java b/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/azure/LSMAzBlobStorageTest.java
new file mode 100644
index 0000000..1f49fd9
--- /dev/null
+++ b/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/azure/LSMAzBlobStorageTest.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.cloud.azure;
+
+import org.apache.asterix.cloud.AbstractLSMTest;
+import org.apache.asterix.cloud.clients.ICloudGuardian;
+import org.apache.asterix.cloud.clients.azure.blobstorage.AzBlobStorageClientConfig;
+import org.apache.asterix.cloud.clients.azure.blobstorage.AzBlobStorageCloudClient;
+import org.apache.hyracks.util.StorageUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import com.azure.core.http.rest.PagedIterable;
+import com.azure.storage.blob.BlobClient;
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.BlobServiceClient;
+import com.azure.storage.blob.BlobServiceClientBuilder;
+import com.azure.storage.blob.models.BlobItem;
+import com.azure.storage.blob.models.ListBlobsOptions;
+import com.azure.storage.common.StorageSharedKeyCredential;
+
+public class LSMAzBlobStorageTest extends AbstractLSMTest {
+ private static BlobContainerClient client;
+
+ private static BlobServiceClient blobServiceClient;
+ private static final int MOCK_SERVER_PORT = 15055;
+ private static final String MOCK_SERVER_HOSTNAME = "http://127.0.0.1:" + MOCK_SERVER_PORT;
+ private static final String MOCK_SERVER_REGION = "us-west-2";
+
+ @BeforeClass
+ public static void setup() throws Exception {
+ LOGGER.info("LSMAzBlobStorageTest setup");
+
+ String endpointString = "http://127.0.0.1:15055/devstoreaccount1/" + PLAYGROUND_CONTAINER;
+ final String accKey =
+ "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==";
+ final String accName = "devstoreaccount1";
+
+ blobServiceClient = new BlobServiceClientBuilder().endpoint(endpointString)
+ .credential(new StorageSharedKeyCredential(accName, accKey)).buildClient();
+
+ // Start the test clean by deleting any residual data from previous tests
+ blobServiceClient.deleteBlobContainerIfExists(PLAYGROUND_CONTAINER);
+ client = blobServiceClient.createBlobContainerIfNotExists(PLAYGROUND_CONTAINER);
+
+ LOGGER.info("Az Blob Client created successfully");
+ int writeBufferSize = StorageUtil.getIntSizeInBytes(5, StorageUtil.StorageUnit.MEGABYTE);
+ AzBlobStorageClientConfig config = new AzBlobStorageClientConfig(MOCK_SERVER_REGION, MOCK_SERVER_HOSTNAME, "",
+ true, 0, PLAYGROUND_CONTAINER, 1, 0, 0, writeBufferSize);
+ CLOUD_CLIENT = new AzBlobStorageCloudClient(config, ICloudGuardian.NoOpCloudGuardian.INSTANCE);
+ }
+
+ private static void cleanup() {
+ try {
+ PagedIterable<BlobItem> blobItems = client.listBlobs(new ListBlobsOptions().setPrefix(""), null);
+ // Delete all the contents of the container
+ for (BlobItem blobItem : blobItems) {
+ BlobClient blobClient = client.getBlobClient(blobItem.getName());
+ blobClient.delete();
+ }
+ // Delete the container
+ blobServiceClient.deleteBlobContainer(PLAYGROUND_CONTAINER);
+ } catch (Exception ex) {
+ // ignore
+ }
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ LOGGER.info("Shutdown Azurite");
+ // Azure clients do not need explicit closure.
+ cleanup();
+ }
+}
diff --git a/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/gcs/LSMGCSTest.java b/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/gcs/LSMGCSTest.java
index 3c62cce..08864ac 100644
--- a/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/gcs/LSMGCSTest.java
+++ b/asterixdb/asterix-cloud/src/test/java/org/apache/asterix/cloud/gcs/LSMGCSTest.java
@@ -22,6 +22,7 @@
import org.apache.asterix.cloud.clients.ICloudGuardian;
import org.apache.asterix.cloud.clients.google.gcs.GCSClientConfig;
import org.apache.asterix.cloud.clients.google.gcs.GCSCloudClient;
+import org.apache.hyracks.util.StorageUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -48,7 +49,9 @@
client.create(BucketInfo.newBuilder(PLAYGROUND_CONTAINER).setStorageClass(StorageClass.STANDARD)
.setLocation(MOCK_SERVER_REGION).build());
LOGGER.info("Client created successfully");
- GCSClientConfig config = new GCSClientConfig(MOCK_SERVER_REGION, MOCK_SERVER_HOSTNAME, "", true, 0);
+ int writeBufferSize = StorageUtil.getIntSizeInBytes(5, StorageUtil.StorageUnit.MEGABYTE);
+ GCSClientConfig config =
+ new GCSClientConfig(MOCK_SERVER_REGION, MOCK_SERVER_HOSTNAME, true, 0, writeBufferSize, "");
CLOUD_CLIENT = new GCSCloudClient(config, ICloudGuardian.NoOpCloudGuardian.INSTANCE);
}
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/assembler/AssemblerBuilderVisitor.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/assembler/AssemblerBuilderVisitor.java
index 6480c30..cb447c8 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/assembler/AssemblerBuilderVisitor.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/assembler/AssemblerBuilderVisitor.java
@@ -18,7 +18,7 @@
*/
package org.apache.asterix.column.assembler;
-import static org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary.DUMMY_FIELD_NAME_INDEX;
+import static org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary.DUMMY_FIELD_NAME_INDEX;
import java.util.ArrayList;
import java.util.BitSet;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/ObjectSchemaNode.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/ObjectSchemaNode.java
index 0bea188..1f74fb3 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/ObjectSchemaNode.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/ObjectSchemaNode.java
@@ -18,7 +18,7 @@
*/
package org.apache.asterix.column.metadata.schema;
-import static org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary.DUMMY_FIELD_NAME_INDEX;
+import static org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary.DUMMY_FIELD_NAME_INDEX;
import java.io.DataInput;
import java.io.DataInputStream;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaBuilderFromIATypeVisitor.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaBuilderFromIATypeVisitor.java
index 05c4eda..c7d3df1 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaBuilderFromIATypeVisitor.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaBuilderFromIATypeVisitor.java
@@ -22,12 +22,12 @@
import java.util.List;
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
import org.apache.asterix.column.metadata.schema.collection.AbstractCollectionSchemaNode;
import org.apache.asterix.column.metadata.schema.primitive.MissingFieldSchemaNode;
import org.apache.asterix.column.operation.lsm.flush.FlushColumnMetadata;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.AUnionType;
import org.apache.asterix.om.types.AbstractCollectionType;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaClipperVisitor.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaClipperVisitor.java
index afe8368..ff05568 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaClipperVisitor.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/schema/visitor/SchemaClipperVisitor.java
@@ -21,13 +21,13 @@
import java.io.IOException;
import java.util.Map;
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
import org.apache.asterix.column.metadata.schema.UnionSchemaNode;
import org.apache.asterix.column.metadata.schema.collection.AbstractCollectionSchemaNode;
import org.apache.asterix.column.metadata.schema.primitive.MissingFieldSchemaNode;
import org.apache.asterix.column.metadata.schema.primitive.PrimitiveSchemaNode;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.AUnionType;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/lsm/flush/FlushColumnMetadata.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/lsm/flush/FlushColumnMetadata.java
index f514638..87f9ff3 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/lsm/flush/FlushColumnMetadata.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/lsm/flush/FlushColumnMetadata.java
@@ -33,9 +33,7 @@
import java.util.Map;
import org.apache.asterix.column.metadata.AbstractColumnMetadata;
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
import org.apache.asterix.column.metadata.PathInfoSerializer;
-import org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary;
import org.apache.asterix.column.metadata.schema.AbstractSchemaNestedNode;
import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
@@ -51,6 +49,8 @@
import org.apache.asterix.column.values.IColumnValuesWriter;
import org.apache.asterix.column.values.IColumnValuesWriterFactory;
import org.apache.asterix.column.values.writer.AbstractColumnValuesWriter;
+import org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.commons.lang3.mutable.Mutable;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnMetadata.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnMetadata.java
index e507d53..15a6277 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnMetadata.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnMetadata.java
@@ -41,8 +41,6 @@
import org.apache.asterix.column.filter.range.IColumnRangeFilterEvaluatorFactory;
import org.apache.asterix.column.filter.range.IColumnRangeFilterValueAccessor;
import org.apache.asterix.column.metadata.AbstractColumnImmutableReadMetadata;
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
-import org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary;
import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
import org.apache.asterix.column.metadata.schema.visitor.SchemaClipperVisitor;
@@ -50,6 +48,8 @@
import org.apache.asterix.column.values.IColumnValuesReader;
import org.apache.asterix.column.values.IColumnValuesReaderFactory;
import org.apache.asterix.column.values.reader.PrimitiveColumnValuesReader;
+import org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.runtime.projection.FunctionCallInformation;
import org.apache.hyracks.api.context.IHyracksTaskContext;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnWithMetaMetadata.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnWithMetaMetadata.java
index 356ddaa..d931242 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnWithMetaMetadata.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/operation/query/QueryColumnWithMetaMetadata.java
@@ -38,14 +38,14 @@
import org.apache.asterix.column.filter.iterable.IColumnIterableFilterEvaluatorFactory;
import org.apache.asterix.column.filter.range.IColumnRangeFilterEvaluatorFactory;
import org.apache.asterix.column.filter.range.IColumnRangeFilterValueAccessor;
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
-import org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary;
import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
import org.apache.asterix.column.metadata.schema.visitor.SchemaClipperVisitor;
import org.apache.asterix.column.values.IColumnValuesReader;
import org.apache.asterix.column.values.IColumnValuesReaderFactory;
import org.apache.asterix.column.values.reader.PrimitiveColumnValuesReader;
+import org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.runtime.projection.FunctionCallInformation;
import org.apache.hyracks.api.context.IHyracksTaskContext;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaJSONBuilderVisitor.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaJSONBuilderVisitor.java
index b425a26..a4b9240 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaJSONBuilderVisitor.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaJSONBuilderVisitor.java
@@ -21,7 +21,6 @@
import java.util.ArrayList;
import java.util.List;
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
import org.apache.asterix.column.metadata.schema.ISchemaNodeVisitor;
import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
@@ -30,6 +29,7 @@
import org.apache.asterix.column.metadata.schema.primitive.PrimitiveSchemaNode;
import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IValueReference;
import org.apache.hyracks.data.std.util.ByteArrayAccessibleDataInputStream;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaStringBuilderVisitor.java b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaStringBuilderVisitor.java
index 6d991cf..ad0e460 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaStringBuilderVisitor.java
+++ b/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/util/SchemaStringBuilderVisitor.java
@@ -21,7 +21,6 @@
import java.util.ArrayList;
import java.util.List;
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
import org.apache.asterix.column.metadata.schema.AbstractSchemaNode;
import org.apache.asterix.column.metadata.schema.ISchemaNodeVisitor;
import org.apache.asterix.column.metadata.schema.ObjectSchemaNode;
@@ -30,6 +29,7 @@
import org.apache.asterix.column.metadata.schema.primitive.PrimitiveSchemaNode;
import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IValueReference;
import org.apache.hyracks.data.std.util.ByteArrayAccessibleDataInputStream;
diff --git a/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameDictionaryPerfTest.java b/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameDictionaryPerfTest.java
index 63c2b22..4b33e19 100644
--- a/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameDictionaryPerfTest.java
+++ b/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameDictionaryPerfTest.java
@@ -20,11 +20,11 @@
import java.util.concurrent.TimeUnit;
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
-import org.apache.asterix.column.metadata.dictionary.FieldNamesHashDictionary;
-import org.apache.asterix.column.metadata.dictionary.FieldNamesTrieDictionary;
import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
import org.apache.asterix.om.base.AMutableString;
+import org.apache.asterix.om.dictionary.FieldNamesHashDictionary;
+import org.apache.asterix.om.dictionary.FieldNamesTrieDictionary;
+import org.apache.asterix.om.dictionary.IFieldNamesDictionary;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IValueReference;
diff --git a/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameTrieTest.java b/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameTrieTest.java
index c9f58d5..1ec468c 100644
--- a/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameTrieTest.java
+++ b/asterixdb/asterix-column/src/test/java/org/apache/asterix/column/metadata/trie/FieldNameTrieTest.java
@@ -28,8 +28,8 @@
import java.util.Collections;
import java.util.List;
-import org.apache.asterix.column.metadata.dictionary.FieldNameTrie;
-import org.apache.asterix.column.metadata.dictionary.FieldNamesTrieDictionary;
+import org.apache.asterix.om.dictionary.FieldNameTrie;
+import org.apache.asterix.om.dictionary.FieldNamesTrieDictionary;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IValueReference;
import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IIdentifierMapper.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IIdentifierMapper.java
index b6bce47..ba26ba7 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IIdentifierMapper.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IIdentifierMapper.java
@@ -19,13 +19,29 @@
package org.apache.asterix.common.api;
+import org.apache.commons.lang3.StringUtils;
+
@FunctionalInterface
public interface IIdentifierMapper {
enum Modifier {
SINGULAR,
+ SINGULAR_CAPITALIZED,
PLURAL,
- NONE
+ PLURAL_CAPITALIZED,
+ NONE,
+ NONE_CAPITALIZED;
+
+ public String fixup(String input) {
+ switch (this) {
+ case SINGULAR_CAPITALIZED:
+ case PLURAL_CAPITALIZED:
+ case NONE_CAPITALIZED:
+ return StringUtils.capitalize(input);
+ default:
+ return input;
+ }
+ }
}
String map(String identifier, Modifier modifier);
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
index 8ca5c94..957cb84 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
@@ -152,7 +152,11 @@
COMPILER_COPY_TO_WRITE_BUFFER_SIZE(
getRangedIntegerType(5, Integer.MAX_VALUE),
StorageUtil.getIntSizeInBytes(8, StorageUtil.StorageUnit.MEGABYTE),
- "The COPY TO write buffer size in bytes. (default: 8MB, min: 5MB)");
+ "The COPY TO write buffer size in bytes. (default: 8MB, min: 5MB)"),
+ COMPILER_MAX_VARIABLE_OCCURRENCES_INLINING(
+ getRangedIntegerType(0, Integer.MAX_VALUE),
+ 128,
+ "Maximum occurrences of a variable allowed in an expression for inlining");
private final IOptionType type;
private final Object defaultValue;
@@ -234,6 +238,9 @@
public static final String COMPILER_COLUMN_FILTER_KEY = Option.COMPILER_COLUMN_FILTER.ini();
+ public static final String COMPILER_MAX_VARIABLE_OCCURRENCES_INLINING_KEY =
+ Option.COMPILER_MAX_VARIABLE_OCCURRENCES_INLINING.ini();
+
public static final int COMPILER_PARALLELISM_AS_STORAGE = 0;
public CompilerProperties(PropertiesAccessor accessor) {
@@ -369,4 +376,8 @@
public int getCopyToWriteBufferSize() {
return accessor.getInt(Option.COMPILER_COPY_TO_WRITE_BUFFER_SIZE);
}
+
+ public int getMaxVariableOccurrencesForInlining() {
+ return accessor.getInt(Option.COMPILER_MAX_VARIABLE_OCCURRENCES_INLINING);
+ }
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/OptimizationConfUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/OptimizationConfUtil.java
index 160c04d..28ab077 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/OptimizationConfUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/OptimizationConfUtil.java
@@ -95,6 +95,8 @@
compilerProperties.getQueryPlanShapeMode());
boolean columnFilter = getBoolean(querySpecificConfig, CompilerProperties.COMPILER_COLUMN_FILTER_KEY,
compilerProperties.isColumnFilter());
+ int maxVariableOccurrencesForInlining =
+ getMaxVariableOccurrencesForInlining(compilerProperties, querySpecificConfig, sourceLoc);
PhysicalOptimizationConfig physOptConf = new PhysicalOptimizationConfig();
physOptConf.setFrameSize(frameSize);
@@ -123,6 +125,7 @@
physOptConf.setMinJoinFrames(compilerProperties.getMinJoinMemoryFrames());
physOptConf.setMinGroupFrames(compilerProperties.getMinGroupMemoryFrames());
physOptConf.setMinWindowFrames(compilerProperties.getMinWindowMemoryFrames());
+ physOptConf.setMaxVariableOccurrencesForInlining(maxVariableOccurrencesForInlining);
// We should have already validated the parameter names at this point...
Set<String> filteredParameterNames = new HashSet<>(parameterNames);
@@ -219,4 +222,16 @@
}
return defaultValue;
}
+
+ private static int getMaxVariableOccurrencesForInlining(CompilerProperties compilerProperties,
+ Map<String, Object> querySpecificConfig, SourceLocation sourceLoc) throws AsterixException {
+ String valueInQuery =
+ (String) querySpecificConfig.get(CompilerProperties.COMPILER_MAX_VARIABLE_OCCURRENCES_INLINING_KEY);
+ try {
+ return valueInQuery == null ? compilerProperties.getMaxVariableOccurrencesForInlining()
+ : OptionTypes.NONNEGATIVE_INTEGER.parse(valueInQuery);
+ } catch (IllegalArgumentException e) {
+ throw AsterixException.create(ErrorCode.COMPILATION_ERROR, sourceLoc, e.getMessage());
+ }
+ }
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java
index 3694bb2..832a7cc 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java
@@ -18,6 +18,7 @@
*/
package org.apache.asterix.common.exceptions;
+import org.apache.asterix.common.utils.IdentifierUtil;
import org.apache.hyracks.api.exceptions.IError;
import org.apache.hyracks.api.util.ErrorMessageUtil;
@@ -303,6 +304,11 @@
DUPLICATE_FIELD_IN_PRIMARY_KEY(1198),
INCOMPATIBLE_FIELDS_IN_PRIMARY_KEY(1199),
PREFIX_SHOULD_NOT_START_WITH_SLASH(1200),
+ ILLEGAL_SIZE_PROVIDED(1201),
+ UNSUPPORTED_WRITER_COMPRESSION_SCHEME(1202),
+ INVALID_PARQUET_SCHEMA(1203),
+ TYPE_UNSUPPORTED_PARQUET_WRITE(1204),
+ INVALID_PARQUET_WRITER_VERSION(1205),
// Feed errors
DATAFLOW_ILLEGAL_STATE(3001),
@@ -460,8 +466,8 @@
}
private static class ErrorMessageMapHolder {
- private static final String[] enumMessages =
- ErrorMessageUtil.defineMessageEnumOrdinalMap(values(), RESOURCE_PATH);
+ private static final String[] enumMessages = IdentifierUtil
+ .replaceIdentifiers(ErrorMessageUtil.defineMessageEnumOrdinalMap(values(), RESOURCE_PATH));
private static String get(ErrorCode errorCode) {
return enumMessages[errorCode.ordinal()];
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/MetadataConstants.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/MetadataConstants.java
index 9b1d5ae..5bff2c1 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/MetadataConstants.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/MetadataConstants.java
@@ -38,6 +38,9 @@
public static final String SYSTEM_DATABASE = "System";
public static final String DEFAULT_DATABASE = "Default";
+ // Pre-defined owner
+ public static final String DEFAULT_OWNER = "@sys";
+
// Name of the dataverse the metadata lives in.
public static final DataverseName METADATA_DATAVERSE_NAME = DataverseName.createBuiltinDataverseName("Metadata");
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierMappingUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierMappingUtil.java
index 8157125..a32f7f9 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierMappingUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierMappingUtil.java
@@ -21,42 +21,59 @@
import static org.apache.asterix.common.utils.IdentifierUtil.DATASET;
import static org.apache.asterix.common.utils.IdentifierUtil.DATAVERSE;
+import static org.apache.asterix.common.utils.IdentifierUtil.PRODUCT_ABBREVIATION;
+import static org.apache.asterix.common.utils.IdentifierUtil.PRODUCT_NAME;
import org.apache.asterix.common.api.IIdentifierMapper;
import org.apache.asterix.common.api.IIdentifierMapper.Modifier;
public class IdentifierMappingUtil {
+ private static final String PLAIN_DATASET = "dataset";
private static final String SINGULAR_DATASET = "a dataset";
private static final String PLURAL_DATASET = "datasets";
+ private static final String PLAIN_DATAVERSE = "dataverse";
private static final String SINGULAR_DATAVERSE = "a dataverse";
private static final String PLURAL_DATAVERSE = "dataverses";
+ private static final String DEFAULT_PRODUCT_NAME = "Apache AsterixDB";
+ private static final String DEFAULT_PRODUCT_ABBREVIATION = "AsterixDB";
+
private static final IIdentifierMapper DEFAULT_MAPPER = (identifier, modifier) -> {
switch (identifier) {
case DATASET:
switch (modifier) {
case NONE:
- return DATASET;
+ case NONE_CAPITALIZED:
+ return modifier.fixup(PLAIN_DATASET);
case SINGULAR:
- return SINGULAR_DATASET;
+ case SINGULAR_CAPITALIZED:
+ return modifier.fixup(SINGULAR_DATASET);
case PLURAL:
- return PLURAL_DATASET;
+ case PLURAL_CAPITALIZED:
+ return modifier.fixup(PLURAL_DATASET);
default:
throw new IllegalArgumentException("unknown modifier " + modifier);
}
case DATAVERSE:
switch (modifier) {
case NONE:
- return DATAVERSE;
+ case NONE_CAPITALIZED:
+ return modifier.fixup(PLAIN_DATAVERSE);
case SINGULAR:
- return SINGULAR_DATAVERSE;
+ case SINGULAR_CAPITALIZED:
+ return modifier.fixup(SINGULAR_DATAVERSE);
case PLURAL:
- return PLURAL_DATAVERSE;
+ case PLURAL_CAPITALIZED:
+ return modifier.fixup(PLURAL_DATAVERSE);
default:
throw new IllegalArgumentException("unknown modifier " + modifier);
}
+ case PRODUCT_NAME:
+ return modifier.fixup(DEFAULT_PRODUCT_NAME);
+ case PRODUCT_ABBREVIATION:
+ return modifier.fixup(DEFAULT_PRODUCT_ABBREVIATION);
default:
throw new IllegalArgumentException("unmapped identifier: " + identifier);
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierUtil.java
index 88b7190..addc192 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/IdentifierUtil.java
@@ -22,10 +22,22 @@
import static org.apache.asterix.common.api.IIdentifierMapper.Modifier;
import static org.apache.asterix.common.api.IIdentifierMapper.Modifier.NONE;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.asterix.common.api.IIdentifierMapper;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
public class IdentifierUtil {
- public static final String DATASET = "dataset";
- public static final String DATAVERSE = "dataverse";
+ private static final Logger LOGGER = LogManager.getLogger();
+ private static final Pattern MESSAGE_IDENTIFIERS = Pattern.compile("@([A-Z_]*)(:([A-Z_]*))?@");
+
+ public static final String DATASET = "DATASET";
+ public static final String DATAVERSE = "DATAVERSE";
+ public static final String PRODUCT_NAME = "PRODUCT_NAME";
+ public static final String PRODUCT_ABBREVIATION = "PRODUCT_ABBREVIATION";
public static String dataset() {
return IdentifierMappingUtil.map(DATASET, NONE);
@@ -38,4 +50,41 @@
public static String dataverse() {
return IdentifierMappingUtil.map(DATAVERSE, NONE);
}
+
+ public static String productName() {
+ return IdentifierMappingUtil.map(PRODUCT_NAME, NONE);
+ }
+
+ public static String productAbbreviation() {
+ return IdentifierMappingUtil.map(PRODUCT_ABBREVIATION, NONE);
+ }
+
+ public static String replaceIdentifiers(String input) {
+ if (input == null || input.isEmpty()) {
+ return input;
+ }
+ Matcher m = MESSAGE_IDENTIFIERS.matcher(input);
+ String replacement = m.replaceAll(mr -> {
+ String identifier = mr.group(1);
+ String modifierStr = mr.group(3);
+ IIdentifierMapper.Modifier modifier;
+ if (modifierStr != null) {
+ modifier = IIdentifierMapper.Modifier.valueOf(modifierStr);
+ } else {
+ modifier = IIdentifierMapper.Modifier.NONE;
+ }
+ return IdentifierMappingUtil.map(identifier, modifier);
+ });
+ if (!input.equals(replacement)) {
+ LOGGER.debug("{} -> {}", input, replacement);
+ }
+ return replacement;
+ }
+
+ public static String[] replaceIdentifiers(String[] input) {
+ for (int i = 0; i < input.length; i++) {
+ input[i] = IdentifierUtil.replaceIdentifiers(input[i]);
+ }
+ return input;
+ }
}
diff --git a/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties b/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
index 4b7da0c..748a2b9 100644
--- a/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
+++ b/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
@@ -305,6 +305,11 @@
1198 = Duplicate field '%1$s' in primary key
1199 = Fields '%1$s' and '%2$s' are incompatible for primary key
1200 = Prefix should not start with "/". Prefix: '%1$s'
+1201 = Storage units expected for the field '%1$s' (e.g., 0.1KB, 100kb, 1mb, 3MB, 8.5GB ...). Provided '%2$s'
+1202 = Unsupported compression scheme %1$s. Supported schemes for %2$s are %3$s
+1203 = Invalid schema provided: '%1$s'
+1204 = '%1$s' type not supported in parquet format
+1205 = Invalid Parquet Writer Version provided '%1$s'. Supported values: %2$s
# Feed Errors
3001 = Illegal state.
diff --git a/asterixdb/asterix-external-data/pom.xml b/asterixdb/asterix-external-data/pom.xml
index 7f3c7ec..6a3f891 100644
--- a/asterixdb/asterix-external-data/pom.xml
+++ b/asterixdb/asterix-external-data/pom.xml
@@ -420,8 +420,8 @@
<artifactId>javax.activation</artifactId>
</dependency>
<dependency>
- <groupId>com.esri.geometry</groupId>
- <artifactId>esri-geometry-api</artifactId>
+ <groupId>org.locationtech.jts</groupId>
+ <artifactId>jts-core</artifactId>
</dependency>
<dependency>
<groupId>javax.xml.bind</groupId>
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
index 3139be7..0407005 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
@@ -82,6 +82,11 @@
public static final String KEY_EXPRESSION = "expression";
public static final String KEY_LOCAL_SOCKET_PATH = "local-socket-path";
public static final String KEY_FORMAT = "format";
+ public static final String KEY_SCHEMA = "schema";
+ public static final String KEY_PARQUET_ROW_GROUP_SIZE = "row-group-size";
+ public static final String PARQUET_DEFAULT_ROW_GROUP_SIZE = "10MB";
+ public static final String KEY_PARQUET_PAGE_SIZE = "page-size";
+ public static final String PARQUET_DEFAULT_PAGE_SIZE = "8KB";
public static final String KEY_INCLUDE = "include";
public static final String KEY_EXCLUDE = "exclude";
public static final String KEY_QUOTE = "quote";
@@ -161,6 +166,8 @@
public static final String CLASS_NAME_PARQUET_INPUT_FORMAT =
"org.apache.asterix.external.input.record.reader.hdfs.parquet.MapredParquetInputFormat";
public static final String CLASS_NAME_HDFS_FILESYSTEM = "org.apache.hadoop.hdfs.DistributedFileSystem";
+ public static final String S3A_CHANGE_DETECTION_REQUIRED = "requireVersionChangeDetection";
+ public static final String S3A_CHANGE_DETECTION_REQUIRED_CONFIG_KEY = "fs.s3a.change.detection.version.required";
/**
* input formats aliases
*/
@@ -204,6 +211,13 @@
public static final String FORMAT_CSV = "csv";
public static final String FORMAT_TSV = "tsv";
public static final String FORMAT_PARQUET = "parquet";
+ public static final String PARQUET_SCHEMA_KEY = "parquet-schema";
+ public static final String PARQUET_WRITER_VERSION_KEY = "version";
+ public static final String PARQUET_WRITER_VERSION_VALUE_1 = "1";
+ public static final String PARQUET_WRITER_VERSION_VALUE_2 = "2";
+ public static final String DUMMY_DATABASE_NAME = "dbname";
+ public static final String DUMMY_TYPE_NAME = "typeName";
+ public static final String DUMMY_DATAVERSE_NAME = "a.b.c";
public static final String FORMAT_APACHE_ICEBERG = "apache-iceberg";
public static final Set<String> ALL_FORMATS;
public static final Set<String> TEXTUAL_FORMATS;
@@ -306,6 +320,8 @@
* Compression constants
*/
public static final String KEY_COMPRESSION_GZIP = "gzip";
+ public static final String KEY_COMPRESSION_SNAPPY = "snappy";
+ public static final String KEY_COMPRESSION_ZSTD = "zstd";
public static final String KEY_COMPRESSION_GZIP_COMPRESSION_LEVEL = "gzipCompressionLevel";
/**
@@ -318,13 +334,19 @@
public static final int WRITER_MAX_RESULT_MINIMUM = 1000;
public static final Set<String> WRITER_SUPPORTED_FORMATS;
public static final Set<String> WRITER_SUPPORTED_ADAPTERS;
- public static final Set<String> WRITER_SUPPORTED_COMPRESSION;
+ public static final Set<String> TEXTUAL_WRITER_SUPPORTED_COMPRESSION;
+ public static final Set<String> PARQUET_WRITER_SUPPORTED_COMPRESSION;
+ public static final Set<String> PARQUET_WRITER_SUPPORTED_VERSION;
+ public static final int PARQUET_DICTIONARY_PAGE_SIZE = 1048576;
static {
- WRITER_SUPPORTED_FORMATS = Set.of(FORMAT_JSON_LOWER_CASE);
+ WRITER_SUPPORTED_FORMATS = Set.of(FORMAT_JSON_LOWER_CASE, FORMAT_PARQUET);
WRITER_SUPPORTED_ADAPTERS = Set.of(ALIAS_LOCALFS_ADAPTER.toLowerCase(), KEY_ADAPTER_NAME_AWS_S3.toLowerCase(),
KEY_ADAPTER_NAME_GCS.toLowerCase());
- WRITER_SUPPORTED_COMPRESSION = Set.of(KEY_COMPRESSION_GZIP);
+ TEXTUAL_WRITER_SUPPORTED_COMPRESSION = Set.of(KEY_COMPRESSION_GZIP);
+ PARQUET_WRITER_SUPPORTED_COMPRESSION =
+ Set.of(KEY_COMPRESSION_GZIP, KEY_COMPRESSION_SNAPPY, KEY_COMPRESSION_ZSTD);
+ PARQUET_WRITER_SUPPORTED_VERSION = Set.of(PARQUET_WRITER_VERSION_VALUE_1, PARQUET_WRITER_VERSION_VALUE_2);
}
public static class ParquetOptions {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
index f7638b4..6bc013a 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
@@ -218,6 +218,11 @@
configureParquet(configuration, conf);
}
+ if (configuration.containsKey(ExternalDataConstants.S3A_CHANGE_DETECTION_REQUIRED)) {
+ conf.set(ExternalDataConstants.S3A_CHANGE_DETECTION_REQUIRED_CONFIG_KEY,
+ configuration.get(ExternalDataConstants.S3A_CHANGE_DETECTION_REQUIRED));
+ }
+
return conf;
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/WriterValidationUtil.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/WriterValidationUtil.java
index 843600e..5059ec8 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/WriterValidationUtil.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/WriterValidationUtil.java
@@ -20,7 +20,13 @@
import static org.apache.asterix.common.exceptions.ErrorCode.INVALID_REQ_PARAM_VAL;
import static org.apache.asterix.common.exceptions.ErrorCode.MINIMUM_VALUE_ALLOWED_FOR_PARAM;
+import static org.apache.asterix.common.exceptions.ErrorCode.PARAMETERS_REQUIRED;
+import static org.apache.asterix.external.util.ExternalDataConstants.FORMAT_JSON_LOWER_CASE;
+import static org.apache.asterix.external.util.ExternalDataConstants.FORMAT_PARQUET;
+import static org.apache.asterix.external.util.ExternalDataConstants.KEY_PARQUET_PAGE_SIZE;
+import static org.apache.asterix.external.util.ExternalDataConstants.KEY_PARQUET_ROW_GROUP_SIZE;
import static org.apache.asterix.external.util.ExternalDataConstants.KEY_WRITER_MAX_RESULT;
+import static org.apache.asterix.external.util.ExternalDataConstants.PARQUET_WRITER_VERSION_KEY;
import static org.apache.asterix.external.util.ExternalDataConstants.WRITER_MAX_RESULT_MINIMUM;
import java.util.List;
@@ -31,6 +37,7 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.SourceLocation;
+import org.apache.hyracks.util.StorageUtil;
public class WriterValidationUtil {
@@ -41,7 +48,6 @@
Map<String, String> configuration, SourceLocation sourceLocation) throws CompilationException {
validateAdapter(adapter, supportedAdapters, sourceLocation);
validateFormat(configuration, sourceLocation);
- validateCompression(configuration, sourceLocation);
validateMaxResult(configuration, sourceLocation);
}
@@ -56,14 +62,74 @@
String format = configuration.get(ExternalDataConstants.KEY_FORMAT);
checkSupported(ExternalDataConstants.KEY_FORMAT, format, ExternalDataConstants.WRITER_SUPPORTED_FORMATS,
ErrorCode.UNSUPPORTED_WRITING_FORMAT, sourceLocation, false);
+ switch (format.toLowerCase()) {
+ case FORMAT_JSON_LOWER_CASE:
+ validateJSON(configuration, sourceLocation);
+ break;
+ case FORMAT_PARQUET:
+ validateParquet(configuration, sourceLocation);
+ break;
+ }
}
- private static void validateCompression(Map<String, String> configuration, SourceLocation sourceLocation)
+ private static void validateParquet(Map<String, String> configuration, SourceLocation sourceLocation)
+ throws CompilationException {
+ validateParquetCompression(configuration, sourceLocation);
+ validateParquetRowGroupSize(configuration);
+ validateParquetPageSize(configuration);
+ validateVersion(configuration, sourceLocation);
+ }
+
+ private static void validateVersion(Map<String, String> configuration, SourceLocation sourceLocation)
+ throws CompilationException {
+ String version = configuration.get(PARQUET_WRITER_VERSION_KEY);
+ checkSupported(PARQUET_WRITER_VERSION_KEY, version, ExternalDataConstants.PARQUET_WRITER_SUPPORTED_VERSION,
+ ErrorCode.INVALID_PARQUET_WRITER_VERSION, sourceLocation, true);
+ }
+
+ private static void validateParquetRowGroupSize(Map<String, String> configuration) throws CompilationException {
+ String rowGroupSize = configuration.get(KEY_PARQUET_ROW_GROUP_SIZE);
+ if (rowGroupSize == null)
+ return;
+ try {
+ StorageUtil.getByteValue(rowGroupSize);
+ } catch (IllegalArgumentException e) {
+ throw CompilationException.create(ErrorCode.ILLEGAL_SIZE_PROVIDED, KEY_PARQUET_ROW_GROUP_SIZE,
+ rowGroupSize);
+ }
+ }
+
+ private static void validateParquetPageSize(Map<String, String> configuration) throws CompilationException {
+ String pageSize = configuration.get(KEY_PARQUET_PAGE_SIZE);
+ if (pageSize == null)
+ return;
+ try {
+ StorageUtil.getByteValue(pageSize);
+ } catch (IllegalArgumentException e) {
+ throw CompilationException.create(ErrorCode.ILLEGAL_SIZE_PROVIDED, KEY_PARQUET_PAGE_SIZE, pageSize);
+ }
+ }
+
+ private static void validateJSON(Map<String, String> configuration, SourceLocation sourceLocation)
+ throws CompilationException {
+ validateTextualCompression(configuration, sourceLocation);
+ }
+
+ private static void validateParquetCompression(Map<String, String> configuration, SourceLocation sourceLocation)
throws CompilationException {
String compression = configuration.get(ExternalDataConstants.KEY_WRITER_COMPRESSION);
- checkSupported(ExternalDataConstants.KEY_WRITER_COMPRESSION, compression,
- ExternalDataConstants.WRITER_SUPPORTED_COMPRESSION, ErrorCode.UNKNOWN_COMPRESSION_SCHEME,
- sourceLocation, true);
+ checkCompressionSupported(ExternalDataConstants.KEY_WRITER_COMPRESSION, compression,
+ ExternalDataConstants.PARQUET_WRITER_SUPPORTED_COMPRESSION,
+ ErrorCode.UNSUPPORTED_WRITER_COMPRESSION_SCHEME, sourceLocation, FORMAT_PARQUET, true);
+ }
+
+ private static void validateTextualCompression(Map<String, String> configuration, SourceLocation sourceLocation)
+ throws CompilationException {
+ String compression = configuration.get(ExternalDataConstants.KEY_WRITER_COMPRESSION);
+ checkCompressionSupported(ExternalDataConstants.KEY_WRITER_COMPRESSION, compression,
+ ExternalDataConstants.TEXTUAL_WRITER_SUPPORTED_COMPRESSION,
+ ErrorCode.UNSUPPORTED_WRITER_COMPRESSION_SCHEME, sourceLocation,
+ configuration.get(ExternalDataConstants.KEY_FORMAT), true);
if (ExternalDataUtils.isGzipCompression(compression)) {
validateGzipCompressionLevel(configuration, sourceLocation);
}
@@ -94,7 +160,7 @@
}
if (value == null) {
- throw new CompilationException(ErrorCode.PARAMETERS_REQUIRED, sourceLocation, paramKey);
+ throw new CompilationException(PARAMETERS_REQUIRED, sourceLocation, paramKey);
}
String normalizedValue = value.toLowerCase();
@@ -121,4 +187,22 @@
}
}
+ // Checks a compression-codec parameter against a format-specific supported set.
+ // When 'optional' is true a missing value is accepted silently; otherwise a missing
+ // value raises PARAMETERS_REQUIRED. An unsupported value raises 'errorCode' with the
+ // raw value, the format name, and the sorted list of supported codecs (sorted so the
+ // error message is deterministic).
+ private static void checkCompressionSupported(String paramKey, String value, Set<String> supportedSet,
+ ErrorCode errorCode, SourceLocation sourceLocation, String format, boolean optional)
+ throws CompilationException {
+ if (optional && value == null) {
+ return;
+ }
+
+ if (value == null) {
+ throw new CompilationException(PARAMETERS_REQUIRED, sourceLocation, paramKey);
+ }
+
+ // NOTE(review): default-locale toLowerCase(); consider toLowerCase(Locale.ROOT) to
+ // avoid locale-dependent folding (e.g. Turkish dotless-i) — confirm project convention.
+ String normalizedValue = value.toLowerCase();
+ if (!supportedSet.contains(normalizedValue)) {
+ List<String> sorted = supportedSet.stream().sorted().collect(Collectors.toList());
+ throw CompilationException.create(errorCode, sourceLocation, value, format, sorted.toString());
+ }
+ }
+
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/google/gcs/GCSConstants.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/google/gcs/GCSConstants.java
index f2dbde7..6314ce8 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/google/gcs/GCSConstants.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/google/gcs/GCSConstants.java
@@ -26,6 +26,7 @@
public static final String APPLICATION_DEFAULT_CREDENTIALS_FIELD_NAME = "applicationDefaultCredentials";
public static final String JSON_CREDENTIALS_FIELD_NAME = "jsonCredentials";
public static final String ENDPOINT_FIELD_NAME = "endpoint";
+ public static final String STORAGE_PREFIX = "prefix";
/*
* Hadoop internal configuration
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinter.java
new file mode 100644
index 0000000..ba7a1ee
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinter.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.external.writer.printer;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.AsterixParquetRuntimeException;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.writer.printer.parquet.AsterixParquetWriter;
+import org.apache.asterix.om.types.IAType;
+import org.apache.asterix.runtime.writer.IExternalPrinter;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.column.ParquetProperties;
+import org.apache.parquet.hadoop.ParquetWriter;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.MessageTypeParser;
+
+/**
+ * Writes Asterix records ({@link IValueReference}s) to an output stream in Parquet
+ * format. Lifecycle: {@code open()} parses the Parquet schema string, then each
+ * {@code newStream(...)} starts a Parquet file on a fresh stream (closing any
+ * previous writer first), {@code print(...)} appends one record, and {@code close()}
+ * finalizes the current Parquet file. Not thread-safe: a single mutable writer is held.
+ */
+public class ParquetExternalFilePrinter implements IExternalPrinter {
+ private final IAType typeInfo;
+ private final CompressionCodecName compressionCodecName;
+ // Parsed lazily in open() from parquetSchemaString.
+ private MessageType schema;
+ private ParquetOutputFile parquetOutputFile;
+ private String parquetSchemaString;
+ private ParquetWriter<IValueReference> writer;
+ private final long rowGroupSize;
+ private final int pageSize;
+ private final ParquetProperties.WriterVersion writerVersion;
+
+ public ParquetExternalFilePrinter(CompressionCodecName compressionCodecName, String parquetSchemaString,
+ IAType typeInfo, long rowGroupSize, int pageSize, ParquetProperties.WriterVersion writerVersion) {
+ this.compressionCodecName = compressionCodecName;
+ this.parquetSchemaString = parquetSchemaString;
+ this.typeInfo = typeInfo;
+ this.rowGroupSize = rowGroupSize;
+ this.pageSize = pageSize;
+ this.writerVersion = writerVersion;
+ }
+
+ @Override
+ public void open() throws HyracksDataException {
+ // Schema parsing is deferred to open() so construction stays cheap/serializable-friendly.
+ schema = MessageTypeParser.parseMessageType(parquetSchemaString);
+ }
+
+ @Override
+ public void newStream(OutputStream outputStream) throws HyracksDataException {
+ // Finish the previous Parquet file (writes its footer) before starting a new one.
+ if (parquetOutputFile != null) {
+ close();
+ }
+ parquetOutputFile = new ParquetOutputFile(outputStream);
+ Configuration conf = new Configuration();
+
+ try {
+ writer = AsterixParquetWriter.builder(parquetOutputFile).withCompressionCodec(compressionCodecName)
+ .withType(schema).withTypeInfo(typeInfo).withRowGroupSize(rowGroupSize).withPageSize(pageSize)
+ .withDictionaryPageSize(ExternalDataConstants.PARQUET_DICTIONARY_PAGE_SIZE)
+ .enableDictionaryEncoding().withValidation(false).withWriterVersion(writerVersion).withConf(conf)
+ .build();
+ } catch (IOException e) {
+ throw HyracksDataException.create(e);
+ }
+
+ }
+
+ @Override
+ public void print(IValueReference value) throws HyracksDataException {
+ try {
+ this.writer.write(value);
+ } catch (AsterixParquetRuntimeException e) {
+ // The write-support wraps HyracksDataException in an unchecked carrier; unwrap it.
+ throw e.getHyracksDataException();
+ } catch (IOException e) {
+ throw HyracksDataException.create(e);
+ }
+ }
+
+ @Override
+ public void close() throws HyracksDataException {
+ // NOTE(review): writer is not nulled after close; newStream() guards on
+ // parquetOutputFile instead — confirm double-close of ParquetWriter is benign here.
+ if (this.writer != null) {
+ try {
+ this.writer.close();
+ } catch (IOException e) {
+ throw HyracksDataException.create(e);
+ }
+ }
+ }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinterFactory.java
new file mode 100644
index 0000000..5ccd2fe
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetExternalFilePrinterFactory.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer;
+
+import org.apache.asterix.om.types.IAType;
+import org.apache.asterix.runtime.writer.IExternalPrinter;
+import org.apache.asterix.runtime.writer.IExternalPrinterFactory;
+import org.apache.parquet.column.ParquetProperties;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
+
+/**
+ * Serializable factory producing {@link ParquetExternalFilePrinter}s. All Parquet
+ * writer knobs (codec, schema string, row-group size, page size, writer version)
+ * are captured at construction and forwarded verbatim to each printer instance.
+ */
+public class ParquetExternalFilePrinterFactory implements IExternalPrinterFactory {
+ private static final long serialVersionUID = 8971234908711234L;
+ // Parquet schema kept as its textual form so the factory stays serializable.
+ private final String parquetSchemaString;
+ private final IAType typeInfo;
+ private final CompressionCodecName compressionCodecName;
+ private final long rowGroupSize;
+ private final int pageSize;
+ private final ParquetProperties.WriterVersion writerVersion;
+
+ public ParquetExternalFilePrinterFactory(CompressionCodecName compressionCodecName, String parquetSchemaString,
+ IAType typeInfo, long rowGroupSize, int pageSize, ParquetProperties.WriterVersion writerVersion) {
+ this.compressionCodecName = compressionCodecName;
+ this.parquetSchemaString = parquetSchemaString;
+ this.typeInfo = typeInfo;
+ this.rowGroupSize = rowGroupSize;
+ this.pageSize = pageSize;
+ this.writerVersion = writerVersion;
+ }
+
+ @Override
+ public IExternalPrinter createPrinter() {
+ return new ParquetExternalFilePrinter(compressionCodecName, parquetSchemaString, typeInfo, rowGroupSize,
+ pageSize, writerVersion);
+ }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetOutputFile.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetOutputFile.java
new file mode 100644
index 0000000..5db600f
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/ParquetOutputFile.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.external.writer.printer;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.parquet.hadoop.util.HadoopStreams;
+import org.apache.parquet.io.OutputFile;
+import org.apache.parquet.io.PositionOutputStream;
+
+public class ParquetOutputFile implements OutputFile {
+ // The provided OutputStream wrapped as an FSDataOutputStream so Parquet's
+ // Hadoop-stream adapter can track write positions.
+ private final FSDataOutputStream fs;
+
+ /*
+ This class wraps OutputStream as a file that Parquet SDK supports writing to.
+ By default, this assumes output stream doesn't support block size which distributed file systems use.
+ Hadoop File System Library use this as a default block size
+ Ref : https://github.com/apache/hadoop/blob/74ff00705cf67911f1ff8320c6c97354350d6952/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java#L2756
+ */
+ private static final long DEFAULT_BLOCK_SIZE = 33554432L;
+
+ public ParquetOutputFile(OutputStream os) {
+ // NOTE(review): Statistics scheme is the placeholder "test" — the stats object is
+ // required by the FSDataOutputStream ctor but apparently unused; confirm and rename.
+ this.fs = new FSDataOutputStream(os, new FileSystem.Statistics("test"));
+ }
+
+ @Override
+ public PositionOutputStream create(long blockSizeHint) throws IOException {
+ // blockSizeHint is ignored: supportsBlockSize() is false for plain streams.
+ return HadoopStreams.wrap(fs);
+ }
+
+ @Override
+ public PositionOutputStream createOrOverwrite(long blockSizeHint) throws IOException {
+ // Same as create(): a raw stream cannot distinguish create vs. overwrite.
+ return HadoopStreams.wrap(fs);
+ }
+
+ @Override
+ public boolean supportsBlockSize() {
+ return false;
+ }
+
+ @Override
+ public long defaultBlockSize() {
+ return DEFAULT_BLOCK_SIZE;
+ }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/AsterixParquetTypeMap.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/AsterixParquetTypeMap.java
new file mode 100644
index 0000000..0dcdb3a
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/AsterixParquetTypeMap.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import java.util.Map;
+
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.parquet.schema.LogicalTypeAnnotation;
+import org.apache.parquet.schema.PrimitiveType;
+
+/**
+ * Static lookup tables mapping Asterix type tags to Parquet physical types and
+ * logical-type annotations. Both maps are immutable ({@code Map.ofEntries}).
+ * Tags absent from PRIMITIVE_TYPE_NAME_MAP have no Parquet physical mapping;
+ * tags absent from LOGICAL_TYPE_ANNOTATION_MAP need no logical annotation.
+ */
+public class AsterixParquetTypeMap {
+
+ public static final Map<ATypeTag, PrimitiveType.PrimitiveTypeName> PRIMITIVE_TYPE_NAME_MAP =
+ Map.ofEntries(Map.entry(ATypeTag.BOOLEAN, PrimitiveType.PrimitiveTypeName.BOOLEAN),
+ Map.entry(ATypeTag.STRING, PrimitiveType.PrimitiveTypeName.BINARY),
+ // All sub-int integer widths widen to Parquet INT32.
+ Map.entry(ATypeTag.TINYINT, PrimitiveType.PrimitiveTypeName.INT32),
+ Map.entry(ATypeTag.SMALLINT, PrimitiveType.PrimitiveTypeName.INT32),
+ Map.entry(ATypeTag.INTEGER, PrimitiveType.PrimitiveTypeName.INT32),
+ Map.entry(ATypeTag.BIGINT, PrimitiveType.PrimitiveTypeName.INT64),
+ Map.entry(ATypeTag.FLOAT, PrimitiveType.PrimitiveTypeName.FLOAT),
+ Map.entry(ATypeTag.DOUBLE, PrimitiveType.PrimitiveTypeName.DOUBLE),
+ Map.entry(ATypeTag.DATE, PrimitiveType.PrimitiveTypeName.INT32),
+ Map.entry(ATypeTag.TIME, PrimitiveType.PrimitiveTypeName.INT32),
+ Map.entry(ATypeTag.DATETIME, PrimitiveType.PrimitiveTypeName.INT64));
+
+ public static final Map<ATypeTag, LogicalTypeAnnotation> LOGICAL_TYPE_ANNOTATION_MAP =
+ Map.ofEntries(Map.entry(ATypeTag.STRING, LogicalTypeAnnotation.stringType()),
+ Map.entry(ATypeTag.DATE, LogicalTypeAnnotation.dateType()),
+ // isAdjustedToUTC = true, millisecond precision for TIME and DATETIME.
+ Map.entry(ATypeTag.TIME,
+ LogicalTypeAnnotation.timeType(true, LogicalTypeAnnotation.TimeUnit.MILLIS)),
+ Map.entry(ATypeTag.DATETIME,
+ LogicalTypeAnnotation.timestampType(true, LogicalTypeAnnotation.TimeUnit.MILLIS)));
+
+ // Static-only holder: prevent instantiation.
+ private AsterixParquetTypeMap() {
+ }
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/AsterixParquetWriter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/AsterixParquetWriter.java
new file mode 100644
index 0000000..edeab1f
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/AsterixParquetWriter.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.asterix.om.types.IAType;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.column.ParquetProperties;
+import org.apache.parquet.hadoop.ParquetWriter;
+import org.apache.parquet.hadoop.api.WriteSupport;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
+import org.apache.parquet.io.OutputFile;
+import org.apache.parquet.schema.MessageType;
+
+/**
+ * ParquetWriter specialization whose write support serializes Asterix
+ * {@link IValueReference} records via {@link ObjectWriteSupport}. Obtain
+ * instances through {@link #builder(OutputFile)} / {@link #builder(Path)}.
+ */
+public class AsterixParquetWriter extends ParquetWriter<IValueReference> {
+ public static Builder builder(Path file) {
+ return new Builder(file);
+ }
+
+ public static Builder builder(OutputFile file) {
+ return new Builder(file);
+ }
+
+ AsterixParquetWriter(Path file, WriteSupport<IValueReference> writeSupport,
+ CompressionCodecName compressionCodecName, int blockSize, int pageSize, boolean enableDictionary,
+ boolean enableValidation, ParquetProperties.WriterVersion writerVersion, Configuration conf)
+ throws IOException {
+ // pageSize is passed twice: once as the data page size and once as the
+ // dictionary page size (superclass signature takes both).
+ super(file, writeSupport, compressionCodecName, blockSize, pageSize, pageSize, enableDictionary,
+ enableValidation, writerVersion, conf);
+ }
+
+ public static class Builder extends ParquetWriter.Builder<IValueReference, Builder> {
+ private MessageType type;
+ private IAType typeInfo;
+ private Map<String, String> extraMetaData;
+
+ private Builder(Path file) {
+ super(file);
+ this.type = null;
+ this.extraMetaData = new HashMap<>();
+ }
+
+ private Builder(OutputFile file) {
+ super(file);
+ this.type = null;
+ this.extraMetaData = new HashMap<>();
+ }
+
+ public Builder withType(MessageType type) {
+ this.type = type;
+ return this;
+ }
+
+ public Builder withTypeInfo(IAType typeInfo) {
+ this.typeInfo = typeInfo;
+ return this;
+ }
+
+ public Builder withExtraMetaData(Map<String, String> extraMetaData) {
+ this.extraMetaData = extraMetaData;
+ return this;
+ }
+
+ @Override
+ protected Builder self() {
+ return this;
+ }
+
+ @Override
+ protected WriteSupport<IValueReference> getWriteSupport(Configuration conf) {
+ return new ObjectWriteSupport(this.type, this.typeInfo, this.extraMetaData);
+ }
+ }
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/FieldNamesDictionary.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/FieldNamesDictionary.java
new file mode 100644
index 0000000..7058bf6
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/FieldNamesDictionary.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.asterix.om.dictionary.FieldNamesTrieDictionary;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.hyracks.util.string.UTF8StringUtil;
+
+/**
+ * Caches decoded field-name strings keyed by the index a {@link FieldNamesTrieDictionary}
+ * assigns to each UTF-8 field-name pointable, so repeated field names are decoded once.
+ * Not thread-safe: the StringBuilder is reused as scratch space across calls.
+ */
+public class FieldNamesDictionary {
+ private final FieldNamesTrieDictionary trie;
+ // fieldNames.get(i) is the decoded string for trie index i.
+ private final List<String> fieldNames;
+ private final StringBuilder builder;
+
+ public FieldNamesDictionary() {
+ trie = new FieldNamesTrieDictionary();
+ fieldNames = new ArrayList<>();
+ builder = new StringBuilder();
+ }
+
+ public String getOrCreateFieldNameIndex(IValueReference pointable) throws HyracksDataException {
+ int index = trie.getOrCreateFieldNameIndex(pointable);
+ // Cache hit: the name was decoded on a previous call.
+ if (index < fieldNames.size()) {
+ return fieldNames.get(index);
+ }
+
+ // Cache miss: decode the UTF-8 pointable. NOTE(review): appending to fieldNames
+ // assumes the trie hands out dense, ascending indices (new index == current list
+ // size) — confirm against FieldNamesTrieDictionary's contract.
+ builder.setLength(0);
+ String fieldName = UTF8StringUtil.toString(pointable.getByteArray(), pointable.getStartOffset(), builder);
+ fieldNames.add(fieldName);
+ return fieldName;
+ }
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ObjectWriteSupport.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ObjectWriteSupport.java
new file mode 100644
index 0000000..512b523
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ObjectWriteSupport.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import java.util.Map;
+
+import org.apache.asterix.external.input.record.reader.hdfs.parquet.AsterixParquetRuntimeException;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.hadoop.api.WriteSupport;
+import org.apache.parquet.io.api.RecordConsumer;
+import org.apache.parquet.schema.MessageType;
+
+/**
+ * Parquet {@link WriteSupport} for Asterix records: init() exposes the target
+ * schema plus extra metadata, prepareForWrite() captures the RecordConsumer,
+ * and write() walks each record with a {@link ParquetRecordLazyVisitor}.
+ * HyracksDataExceptions from the visitor are wrapped in the unchecked
+ * {@link AsterixParquetRuntimeException} because write() cannot throw checked
+ * exceptions; callers unwrap it.
+ */
+public class ObjectWriteSupport extends WriteSupport<IValueReference> {
+ private final MessageType schema;
+
+ private RecordConsumer recordConsumer;
+ private final Map<String, String> extraMetaData;
+ final ParquetRecordLazyVisitor parquetRecordLazyVisitor;
+
+ public ObjectWriteSupport(MessageType schema, IAType typeInfo, Map<String, String> extraMetaData) {
+ this.schema = schema;
+ this.extraMetaData = extraMetaData;
+ parquetRecordLazyVisitor = new ParquetRecordLazyVisitor(schema, typeInfo);
+ }
+
+ @Override
+ public String getName() {
+ return "asterix";
+ }
+
+ @Override
+ public WriteSupport.WriteContext init(Configuration configuration) {
+ return new WriteSupport.WriteContext(this.schema, this.extraMetaData);
+ }
+
+ @Override
+ public void prepareForWrite(RecordConsumer recordConsumer) {
+ this.recordConsumer = recordConsumer;
+ }
+
+ @Override
+ public void write(IValueReference valueReference) {
+ try {
+ parquetRecordLazyVisitor.consumeRecord(valueReference, recordConsumer);
+ } catch (HyracksDataException e) {
+ // write() cannot throw checked exceptions; carry it out unchecked.
+ throw new AsterixParquetRuntimeException(e);
+ }
+ }
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetRecordLazyVisitor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetRecordLazyVisitor.java
new file mode 100644
index 0000000..f6e85ef
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetRecordLazyVisitor.java
@@ -0,0 +1,182 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import static org.apache.asterix.external.writer.printer.parquet.ParquetValueWriter.ELEMENT_FIELD;
+import static org.apache.asterix.external.writer.printer.parquet.ParquetValueWriter.GROUP_TYPE_ERROR_FIELD;
+import static org.apache.asterix.external.writer.printer.parquet.ParquetValueWriter.LIST_FIELD;
+import static org.apache.asterix.external.writer.printer.parquet.ParquetValueWriter.PRIMITIVE_TYPE_ERROR_FIELD;
+
+import org.apache.asterix.om.lazy.AbstractLazyVisitablePointable;
+import org.apache.asterix.om.lazy.AbstractListLazyVisitablePointable;
+import org.apache.asterix.om.lazy.FlatLazyVisitablePointable;
+import org.apache.asterix.om.lazy.ILazyVisitablePointableVisitor;
+import org.apache.asterix.om.lazy.RecordLazyVisitablePointable;
+import org.apache.asterix.om.lazy.TypedRecordLazyVisitablePointable;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.api.exceptions.ErrorCode;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IValueReference;
+import org.apache.parquet.io.api.RecordConsumer;
+import org.apache.parquet.schema.GroupType;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.Type;
+
+/**
+ * Streams an Asterix record into a Parquet {@link RecordConsumer}, walking the
+ * record lazily and checking each field against the Parquet schema as it goes.
+ * Records map to Parquet groups; Asterix lists map to the 3-level
+ * group/"list"/"element" layout. Mismatches between the value shape and the
+ * schema node raise RESULT_DOES_NOT_FOLLOW_SCHEMA; fields missing from the
+ * schema raise EXTRA_FIELD_IN_RESULT_NOT_FOUND_IN_SCHEMA. Not thread-safe:
+ * recordConsumer and the reusable pointable are per-instance mutable state.
+ */
+public class ParquetRecordLazyVisitor implements ILazyVisitablePointableVisitor<Void, Type> {
+
+ private final MessageType schema;
+ private final IAType typeInfo;
+ // Reusable top-level record pointable; typed when the record type is known.
+ private final RecordLazyVisitablePointable rec;
+ // Captured per call in consumeRecord(); the visit methods write into it.
+ private RecordConsumer recordConsumer;
+ private FieldNamesDictionary fieldNamesDictionary;
+
+ private final ParquetValueWriter parquetValueWriter;
+
+ public ParquetRecordLazyVisitor(MessageType schema, IAType typeInfo) {
+ this.schema = schema;
+ this.typeInfo = typeInfo;
+ if (typeInfo.getTypeTag() == ATypeTag.OBJECT) {
+ // Closed/known record type: use the typed pointable for schema-aware traversal.
+ this.rec = new TypedRecordLazyVisitablePointable((ARecordType) typeInfo);
+ } else if (typeInfo.getTypeTag() == ATypeTag.ANY) {
+ this.rec = new RecordLazyVisitablePointable(true);
+ } else {
+ throw new RuntimeException("Type Unsupported for parquet printing");
+ }
+ this.fieldNamesDictionary = new FieldNamesDictionary();
+ this.parquetValueWriter = new ParquetValueWriter();
+ }
+
+ public MessageType getSchema() {
+ return schema;
+ }
+
+ @Override
+ public Void visit(RecordLazyVisitablePointable pointable, Type type) throws HyracksDataException {
+
+ // A nested record must land on a Parquet group node, never a primitive.
+ if (type.isPrimitive()) {
+ throw new HyracksDataException(ErrorCode.RESULT_DOES_NOT_FOLLOW_SCHEMA, GROUP_TYPE_ERROR_FIELD,
+ PRIMITIVE_TYPE_ERROR_FIELD, type.getName());
+ }
+ GroupType groupType = type.asGroupType();
+ recordConsumer.startGroup();
+
+ for (int i = 0; i < pointable.getNumberOfChildren(); i++) {
+ pointable.nextChild();
+ AbstractLazyVisitablePointable child = pointable.getChildVisitablePointable();
+ String columnName = fieldNamesDictionary.getOrCreateFieldNameIndex(pointable.getFieldName());
+
+ if (!groupType.containsField(columnName)) {
+ throw new HyracksDataException(ErrorCode.EXTRA_FIELD_IN_RESULT_NOT_FOUND_IN_SCHEMA, columnName,
+ groupType.getName());
+ }
+ // startField/endField must bracket the child's own start/end calls exactly.
+ recordConsumer.startField(columnName, groupType.getFieldIndex(columnName));
+ child.accept(this, groupType.getType(columnName));
+ recordConsumer.endField(columnName, groupType.getFieldIndex(columnName));
+ }
+ recordConsumer.endGroup();
+ return null;
+ }
+
+ @Override
+ public Void visit(AbstractListLazyVisitablePointable pointable, Type type) throws HyracksDataException {
+
+ // A list must land on a group node shaped as group -> "list" -> "element".
+ if (type.isPrimitive()) {
+ throw new HyracksDataException(ErrorCode.RESULT_DOES_NOT_FOLLOW_SCHEMA, GROUP_TYPE_ERROR_FIELD,
+ PRIMITIVE_TYPE_ERROR_FIELD, type.getName());
+ }
+ GroupType groupType = type.asGroupType();
+
+ if (!groupType.containsField(LIST_FIELD)) {
+ throw new HyracksDataException(ErrorCode.EXTRA_FIELD_IN_RESULT_NOT_FOUND_IN_SCHEMA, LIST_FIELD,
+ groupType.getName());
+ }
+
+ if (groupType.getType(LIST_FIELD).isPrimitive()) {
+ throw new HyracksDataException(ErrorCode.RESULT_DOES_NOT_FOLLOW_SCHEMA, GROUP_TYPE_ERROR_FIELD,
+ PRIMITIVE_TYPE_ERROR_FIELD, LIST_FIELD);
+ }
+
+ GroupType listType = groupType.getType(LIST_FIELD).asGroupType();
+
+ if (!listType.containsField(ELEMENT_FIELD)) {
+ throw new HyracksDataException(ErrorCode.EXTRA_FIELD_IN_RESULT_NOT_FOUND_IN_SCHEMA, ELEMENT_FIELD,
+ listType.getName());
+ }
+
+ recordConsumer.startGroup();
+
+ // An empty list emits only the outer start/end group: no "list" field at all.
+ if (pointable.getNumberOfChildren() > 0) {
+ recordConsumer.startField(LIST_FIELD, groupType.getFieldIndex(LIST_FIELD));
+
+ for (int i = 0; i < pointable.getNumberOfChildren(); i++) {
+ pointable.nextChild();
+ AbstractLazyVisitablePointable child = pointable.getChildVisitablePointable();
+
+ // Each element is wrapped in its own repeated group around "element".
+ recordConsumer.startGroup();
+ recordConsumer.startField(ELEMENT_FIELD, listType.getFieldIndex(ELEMENT_FIELD));
+ child.accept(this, listType.getType(ELEMENT_FIELD));
+ recordConsumer.endField(ELEMENT_FIELD, listType.getFieldIndex(ELEMENT_FIELD));
+ recordConsumer.endGroup();
+
+ }
+
+ recordConsumer.endField(LIST_FIELD, groupType.getFieldIndex(LIST_FIELD));
+ }
+
+ recordConsumer.endGroup();
+ return null;
+ }
+
+ @Override
+ public Void visit(FlatLazyVisitablePointable pointable, Type type) throws HyracksDataException {
+
+ // A scalar must land on a primitive schema node.
+ if (!type.isPrimitive()) {
+ throw new HyracksDataException(ErrorCode.RESULT_DOES_NOT_FOLLOW_SCHEMA, PRIMITIVE_TYPE_ERROR_FIELD,
+ GROUP_TYPE_ERROR_FIELD, type.getName());
+ }
+ parquetValueWriter.addValueToColumn(recordConsumer, pointable, type.asPrimitiveType());
+ return null;
+ }
+
+ // Entry point: binds valueReference to the reusable record pointable and emits
+ // one Parquet message, validating top-level fields against the message schema.
+ public void consumeRecord(IValueReference valueReference, RecordConsumer recordConsumer)
+ throws HyracksDataException {
+ rec.set(valueReference);
+ this.recordConsumer = recordConsumer;
+
+ recordConsumer.startMessage();
+ for (int i = 0; i < rec.getNumberOfChildren(); i++) {
+ rec.nextChild();
+ String columnName = fieldNamesDictionary.getOrCreateFieldNameIndex(rec.getFieldName());
+ AbstractLazyVisitablePointable child = rec.getChildVisitablePointable();
+
+ if (!schema.containsField(columnName)) {
+ throw new HyracksDataException(ErrorCode.EXTRA_FIELD_IN_RESULT_NOT_FOUND_IN_SCHEMA, columnName,
+ schema.getName());
+ }
+
+ recordConsumer.startField(columnName, schema.getFieldIndex(columnName));
+ child.accept(this, schema.getType(columnName));
+ recordConsumer.endField(columnName, schema.getFieldIndex(columnName));
+ }
+ recordConsumer.endMessage();
+ }
+
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetValueWriter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetValueWriter.java
new file mode 100644
index 0000000..206a3c9
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/ParquetValueWriter.java
@@ -0,0 +1,197 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import java.io.IOException;
+
+import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.exceptions.RuntimeDataException;
+import org.apache.asterix.dataflow.data.nontagged.printers.PrintTools;
+import org.apache.asterix.dataflow.data.nontagged.serde.ABooleanSerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import org.apache.asterix.om.lazy.FlatLazyVisitablePointable;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.utils.ResettableByteArrayOutputStream;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.primitive.VoidPointable;
+import org.apache.hyracks.util.string.UTF8StringUtil;
+import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.io.api.RecordConsumer;
+import org.apache.parquet.schema.PrimitiveType;
+
+public class ParquetValueWriter {
+    public static final String LIST_FIELD = "list";
+    public static final String ELEMENT_FIELD = "element";
+
+    public static final String GROUP_TYPE_ERROR_FIELD = "group";
+    public static final String PRIMITIVE_TYPE_ERROR_FIELD = "primitive";
+
+    private final VoidPointable voidPointable;
+    private final ResettableByteArrayOutputStream byteArrayOutputStream;
+
+    ParquetValueWriter() {
+        this.voidPointable = VoidPointable.FACTORY.createPointable();
+        this.byteArrayOutputStream = new ResettableByteArrayOutputStream();
+    }
+
+    private void addIntegerType(long value, PrimitiveType.PrimitiveTypeName primitiveTypeName, ATypeTag typeTag,
+            RecordConsumer recordConsumer) throws HyracksDataException {
+        switch (primitiveTypeName) {
+            case INT32:
+                recordConsumer.addInteger((int) value);
+                break;
+            case INT64:
+                recordConsumer.addLong(value);
+                break;
+            case FLOAT:
+                recordConsumer.addFloat(value);
+                break;
+            case DOUBLE:
+                recordConsumer.addDouble(value);
+                break;
+            default:
+                throw RuntimeDataException.create(ErrorCode.TYPE_MISMATCH_GENERIC, typeTag, primitiveTypeName);
+        }
+    }
+
+    public void addValueToColumn(RecordConsumer recordConsumer, FlatLazyVisitablePointable pointable,
+            PrimitiveType type) throws HyracksDataException {
+
+        ATypeTag typeTag = pointable.getTypeTag();
+        byte[] b = pointable.getByteArray();
+        int s, l;
+
+        if (pointable.isTagged()) {
+            s = pointable.getStartOffset() + 1; // skip the leading type tag byte
+            l = pointable.getLength() - 1;
+        } else {
+            s = pointable.getStartOffset();
+            l = pointable.getLength();
+        }
+        voidPointable.set(b, s, l);
+
+        PrimitiveType.PrimitiveTypeName primitiveTypeName = type.getPrimitiveTypeName();
+
+        switch (typeTag) {
+            case TINYINT:
+                byte tinyIntValue = AInt8SerializerDeserializer.getByte(b, s);
+                addIntegerType(tinyIntValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case SMALLINT:
+                short smallIntValue = AInt16SerializerDeserializer.getShort(b, s);
+                addIntegerType(smallIntValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case INTEGER:
+                int intValue = AInt32SerializerDeserializer.getInt(b, s);
+                addIntegerType(intValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case BIGINT:
+                long bigIntValue = AInt64SerializerDeserializer.getLong(b, s);
+                addIntegerType(bigIntValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case FLOAT:
+                float floatValue = AFloatSerializerDeserializer.getFloat(b, s);
+                switch (primitiveTypeName) {
+                    case INT32:
+                        recordConsumer.addInteger((int) floatValue);
+                        break;
+                    case INT64:
+                        recordConsumer.addLong((long) floatValue);
+                        break;
+                    case FLOAT:
+                        recordConsumer.addFloat(floatValue);
+                        break;
+                    case DOUBLE:
+                        recordConsumer.addDouble(floatValue);
+                        break;
+                    default:
+                        throw RuntimeDataException.create(ErrorCode.TYPE_MISMATCH_GENERIC, typeTag, primitiveTypeName);
+                }
+                break;
+            case DOUBLE:
+                double doubleValue = ADoubleSerializerDeserializer.getDouble(b, s);
+                switch (primitiveTypeName) {
+                    case INT32:
+                        recordConsumer.addInteger((int) doubleValue);
+                        break;
+                    case INT64:
+                        recordConsumer.addLong((long) doubleValue);
+                        break;
+                    case FLOAT:
+                        recordConsumer.addFloat((float) doubleValue);
+                        break;
+                    case DOUBLE:
+                        recordConsumer.addDouble(doubleValue);
+                        break;
+                    default:
+                        throw RuntimeDataException.create(ErrorCode.TYPE_MISMATCH_GENERIC, typeTag, primitiveTypeName);
+                }
+                break;
+            case STRING:
+                int utfLength = UTF8StringUtil.getUTFLength(b, s);
+                if (primitiveTypeName == PrimitiveType.PrimitiveTypeName.BINARY) {
+                    byteArrayOutputStream.reset();
+                    try {
+                        PrintTools.writeUTF8StringAsJSONUnquoted(b, s, l, utfLength, byteArrayOutputStream);
+                    } catch (IOException e) {
+                        throw HyracksDataException.create(e);
+                    }
+                    recordConsumer.addBinary(Binary.fromReusedByteArray(byteArrayOutputStream.getByteArray(), 0,
+                            byteArrayOutputStream.getLength()));
+
+                } else {
+                    throw RuntimeDataException.create(ErrorCode.TYPE_MISMATCH_GENERIC, typeTag, primitiveTypeName);
+                }
+                break;
+            case BOOLEAN:
+                boolean booleanValue = ABooleanSerializerDeserializer.getBoolean(b, s);
+                if (primitiveTypeName == PrimitiveType.PrimitiveTypeName.BOOLEAN) {
+                    recordConsumer.addBoolean(booleanValue);
+                } else {
+                    throw RuntimeDataException.create(ErrorCode.TYPE_MISMATCH_GENERIC, typeTag, primitiveTypeName);
+                }
+                break;
+            case DATE:
+                int dateValue = ADateSerializerDeserializer.getChronon(b, s);
+                addIntegerType(dateValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case TIME:
+                int timeValue = ATimeSerializerDeserializer.getChronon(b, s);
+                addIntegerType(timeValue, primitiveTypeName, typeTag, recordConsumer);
+                break;
+            case DATETIME:
+                long dateTimeValue = ADateTimeSerializerDeserializer.getChronon(b, s);
+                addIntegerType(dateTimeValue, primitiveTypeName, typeTag, recordConsumer);
+                break; // BUGFIX: missing break caused fall-through into NULL/MISSING
+            case NULL:
+            case MISSING:
+                break; // null/missing are represented by not adding a value to the column
+            default:
+                throw RuntimeDataException.create(ErrorCode.TYPE_MISMATCH_GENERIC, typeTag, primitiveTypeName);
+        }
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/SchemaConverterVisitor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/SchemaConverterVisitor.java
new file mode 100644
index 0000000..b25d1f5
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/writer/printer/parquet/SchemaConverterVisitor.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.writer.printer.parquet;
+
+import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.AUnionType;
+import org.apache.asterix.om.types.AbstractCollectionType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.asterix.om.types.IATypeVisitor;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.parquet.schema.LogicalTypeAnnotation;
+import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.PrimitiveType;
+import org.apache.parquet.schema.Types;
+
+public class SchemaConverterVisitor implements IATypeVisitor<Void, Pair<Types.Builder, String>> {
+    public static final String MESSAGE_NAME = "asterix_schema"; // constant: made final to prevent accidental mutation
+    private final ARecordType schemaType;
+    private ATypeTag unsupportedType;
+
+    private SchemaConverterVisitor(ARecordType schemaType) {
+        this.schemaType = schemaType;
+        this.unsupportedType = null;
+    }
+
+    public static String convertToParquetSchemaString(ARecordType schemaType) throws CompilationException {
+        SchemaConverterVisitor schemaConverterVisitor = new SchemaConverterVisitor(schemaType);
+        return schemaConverterVisitor.getParquetSchema().toString();
+    }
+
+    private MessageType getParquetSchema() throws CompilationException {
+        Types.MessageTypeBuilder builder = Types.buildMessage();
+
+        for (int i = 0; i < schemaType.getFieldNames().length; i++) {
+            String fieldName = schemaType.getFieldNames()[i];
+            IAType childType = schemaType.getFieldType(fieldName);
+            childType.accept(this, new Pair<>(builder, fieldName));
+            if (unsupportedType != null) {
+                throw new CompilationException(ErrorCode.TYPE_UNSUPPORTED_PARQUET_WRITE, unsupportedType.toString());
+            }
+        }
+        return builder.named(MESSAGE_NAME);
+    }
+
+    @Override
+    public Void visit(ARecordType recordType, Pair<Types.Builder, String> arg) {
+        Types.Builder builder = arg.first;
+        String fieldName = arg.second;
+
+        Types.BaseGroupBuilder childBuilder = getGroupChild(builder);
+        for (int i = 0; i < recordType.getFieldNames().length; i++) {
+            String childFieldName = recordType.getFieldNames()[i];
+            IAType childType = recordType.getFieldType(childFieldName);
+
+            childType.accept(this, new Pair<>(childBuilder, childFieldName));
+
+        }
+        childBuilder.named(fieldName);
+
+        return null;
+    }
+
+    @Override
+    public Void visit(AbstractCollectionType collectionType, Pair<Types.Builder, String> arg) {
+        Types.Builder builder = arg.first;
+        String fieldName = arg.second;
+
+        Types.BaseListBuilder childBuilder = getListChild(builder);
+        IAType child = collectionType.getItemType();
+        child.accept(this, new Pair<>(childBuilder, fieldName));
+
+        return null;
+    }
+
+    @Override
+    public Void visit(AUnionType unionType, Pair<Types.Builder, String> arg) {
+        // Shouldn't reach here.
+        return null;
+    }
+
+    @Override
+    public Void visitFlat(IAType flatType, Pair<Types.Builder, String> arg) {
+        Types.Builder builder = arg.first;
+        String fieldName = arg.second;
+
+        PrimitiveType.PrimitiveTypeName primitiveTypeName =
+                AsterixParquetTypeMap.PRIMITIVE_TYPE_NAME_MAP.get(flatType.getTypeTag());
+
+        if (primitiveTypeName == null) {
+            unsupportedType = flatType.getTypeTag();
+            return null; // BUGFIX: bail out before passing a null type to the Parquet builder (would NPE instead of raising TYPE_UNSUPPORTED_PARQUET_WRITE)
+        }
+
+        LogicalTypeAnnotation logicalTypeAnnotation =
+                AsterixParquetTypeMap.LOGICAL_TYPE_ANNOTATION_MAP.get(flatType.getTypeTag());
+
+        getPrimitiveChild(builder, primitiveTypeName, logicalTypeAnnotation).named(fieldName);
+
+        return null;
+    }
+
+    private static Types.BaseGroupBuilder getGroupChild(Types.Builder parent) {
+        if (parent instanceof Types.BaseGroupBuilder) {
+            return ((Types.BaseGroupBuilder<?, ?>) parent).optionalGroup();
+        } else if (parent instanceof Types.BaseListBuilder) {
+            return ((Types.BaseListBuilder<?, ?>) parent).optionalGroupElement();
+        } else {
+            return null; // unreachable for well-formed input: parents are always group or list builders
+        }
+    }
+
+    private static Types.BaseListBuilder getListChild(Types.Builder parent) {
+        if (parent instanceof Types.BaseGroupBuilder) {
+            return ((Types.BaseGroupBuilder<?, ?>) parent).optionalList();
+        } else if (parent instanceof Types.BaseListBuilder) {
+            return ((Types.BaseListBuilder<?, ?>) parent).optionalListElement();
+        } else {
+            return null; // unreachable for well-formed input: parents are always group or list builders
+        }
+    }
+
+    private static Types.Builder getPrimitiveChild(Types.Builder parent, PrimitiveType.PrimitiveTypeName type,
+            LogicalTypeAnnotation annotation) {
+        if (parent instanceof Types.BaseGroupBuilder) {
+            return ((Types.BaseGroupBuilder<?, ?>) parent).optional(type).as(annotation);
+        } else if (parent instanceof Types.BaseListBuilder) {
+            return ((Types.BaseListBuilder<?, ?>) parent).optionalElement(type).as(annotation);
+        } else {
+            return null; // unreachable for well-formed input: parents are always group or list builders
+        }
+    }
+
+}
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
index 189c842..f6da684 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
@@ -45,8 +45,7 @@
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.junit.Assert;
import org.junit.Test;
-
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.locationtech.jts.geom.Point;
public class ADMDataParserTest {
@@ -192,10 +191,10 @@
SerializerDeserializerProvider.INSTANCE.getNonTaggedSerializerDeserializer(BuiltinType.AGEOMETRY);
Object point = geomDeser.deserialize(dataIn);
Assert.assertTrue(point instanceof AGeometry);
- Assert.assertTrue(((AGeometry) point).getGeometry() instanceof OGCPoint);
- OGCPoint p = (OGCPoint) ((AGeometry) point).getGeometry();
- Assert.assertEquals(3.0, p.X(), 1E-5);
- Assert.assertEquals(4.0, p.Y(), 1E-5);
+ Assert.assertTrue(((AGeometry) point).getGeometry() instanceof Point);
+ Point p = (Point) ((AGeometry) point).getGeometry();
+ Assert.assertEquals(3.0, p.getX(), 1E-5);
+ Assert.assertEquals(4.0, p.getY(), 1E-5);
} catch (IOException e) {
e.printStackTrace();
Assert.fail("Error in parsing");
diff --git a/asterixdb/asterix-geo/pom.xml b/asterixdb/asterix-geo/pom.xml
index e4cd8ff..7568d5d 100644
--- a/asterixdb/asterix-geo/pom.xml
+++ b/asterixdb/asterix-geo/pom.xml
@@ -121,8 +121,8 @@
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>com.esri.geometry</groupId>
- <artifactId>esri-geometry-api</artifactId>
+ <groupId>org.locationtech.jts</groupId>
+ <artifactId>jts-core</artifactId>
</dependency>
</dependencies>
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/aggregates/STUnionAggregateFunction.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/aggregates/STUnionAggregateFunction.java
index 4676851..5d64e9d 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/aggregates/STUnionAggregateFunction.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/aggregates/STUnionAggregateFunction.java
@@ -42,11 +42,9 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
/**
* STUnion aggregates a set of objects into one object. If the input is a set of overlapping polygons, their union is
@@ -55,14 +53,13 @@
* output is a GeometryCollection.
*/
public class STUnionAggregateFunction extends AbstractAggregateFunction {
- /**Use WGS 84 (EPSG:4326) as the default coordinate reference system*/
- public static final SpatialReference DEFAULT_CRS = SpatialReference.create(4326);
@SuppressWarnings("unchecked")
private ISerializerDeserializer<AGeometry> geometrySerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AGEOMETRY);
private IPointable inputVal = new VoidPointable();
private IScalarEvaluator eval;
- protected OGCGeometry geometry;
+ protected Geometry geometry;
+ protected GeometryFactory geometryFactory;
private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
@@ -75,7 +72,8 @@
@Override
public void init() throws HyracksDataException {
// Initialize the resulting geometry with an empty point.
- geometry = new OGCPoint(new Point(), DEFAULT_CRS);
+ geometryFactory = new GeometryFactory();
+ geometry = geometryFactory.createPoint((Coordinate) null);
}
@Override
@@ -91,7 +89,7 @@
processNull();
} else if (typeTag == ATypeTag.GEOMETRY) {
DataInput dataIn = new DataInputStream(new ByteArrayInputStream(data, offset + 1, len - 1));
- OGCGeometry geometry1 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
+ Geometry geometry1 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
geometry = geometry.union(geometry1);
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTDoubleGeometryDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTDoubleGeometryDescriptor.java
index 97c30a1..8a85751 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTDoubleGeometryDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTDoubleGeometryDescriptor.java
@@ -26,7 +26,6 @@
import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.om.base.ABoolean;
-import org.apache.asterix.om.base.AGeometry;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.EnumDeserializer;
@@ -40,13 +39,11 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public abstract class AbstractSTDoubleGeometryDescriptor extends AbstractScalarFunctionDynamicDescriptor {
- abstract protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1)
- throws HyracksDataException;
+ abstract protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException;
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
@@ -106,13 +103,13 @@
}
DataInputStream dataIn0 = new DataInputStream(new ByteArrayInputStream(bytes0, offset0 + 1, len0 - 1));
- OGCGeometry geometry0 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn0).getGeometry();
+ Geometry geometry0 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn0).getGeometry();
DataInputStream dataIn1 = new DataInputStream(new ByteArrayInputStream(bytes1, offset1 + 1, len1 - 1));
- OGCGeometry geometry1 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn1).getGeometry();
+ Geometry geometry1 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn1).getGeometry();
Object finalResult = evaluateOGCGeometry(geometry0, geometry1);
- if (finalResult instanceof OGCGeometry) {
+ if (finalResult instanceof Geometry) {
out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
- AGeometrySerializerDeserializer.INSTANCE.serialize(new AGeometry((OGCGeometry) finalResult), out);
+ AGeometrySerializerDeserializer.INSTANCE.serialize((Geometry) finalResult, out);
} else if (finalResult instanceof Boolean) {
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN)
.serialize((boolean) finalResult ? ABoolean.TRUE : ABoolean.FALSE, out);
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTGeometryNDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTGeometryNDescriptor.java
index 1420214..5d281ec 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTGeometryNDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTGeometryNDescriptor.java
@@ -25,7 +25,6 @@
import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
-import org.apache.asterix.om.base.AGeometry;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
import org.apache.asterix.runtime.exceptions.InvalidDataFormatException;
@@ -38,14 +37,13 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public abstract class AbstractSTGeometryNDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
- abstract protected OGCGeometry evaluateOGCGeometry(OGCGeometry geometry, int n) throws HyracksDataException;
+ abstract protected Geometry evaluateOGCGeometry(Geometry geometry, int n) throws HyracksDataException;
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
@@ -101,13 +99,13 @@
ByteArrayInputStream inStream = new ByteArrayInputStream(data, offset + 1, len - 1);
DataInputStream dataIn = new DataInputStream(inStream);
- OGCGeometry geometry = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
+ Geometry geometry = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
int n = (int) AInt64SerializerDeserializer.getLong(data0, offset0 + 1);
- OGCGeometry geometryN = evaluateOGCGeometry(geometry, n);
+ Geometry geometryN = evaluateOGCGeometry(geometry, n);
try {
out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
- AGeometrySerializerDeserializer.INSTANCE.serialize(new AGeometry(geometryN), out);
+ AGeometrySerializerDeserializer.INSTANCE.serialize(geometryN, out);
result.set(resultStorage);
} catch (IOException e) {
throw new InvalidDataFormatException(sourceLoc, getIdentifier(), e,
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTSingleGeometryDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTSingleGeometryDescriptor.java
index e41aebb..e5d9e38 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTSingleGeometryDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/AbstractSTSingleGeometryDescriptor.java
@@ -27,7 +27,6 @@
import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.om.base.ABinary;
import org.apache.asterix.om.base.ABoolean;
-import org.apache.asterix.om.base.AGeometry;
import org.apache.asterix.om.base.AMutableInt32;
import org.apache.asterix.om.base.ARectangle;
import org.apache.asterix.om.types.ATypeTag;
@@ -44,12 +43,11 @@
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
import org.apache.hyracks.util.string.UTF8StringUtil;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public abstract class AbstractSTSingleGeometryDescriptor extends AbstractScalarFunctionDynamicDescriptor {
- abstract protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException;
+ abstract protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException;
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
@@ -99,7 +97,7 @@
}
DataInputStream dataIn0 = new DataInputStream(new ByteArrayInputStream(bytes0, offset0 + 1, len0 - 1));
- OGCGeometry geometry0 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn0).getGeometry();
+ Geometry geometry0 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn0).getGeometry();
Object finalResult = evaluateOGCGeometry(geometry0);
if (finalResult == null) {
@@ -120,9 +118,9 @@
} else if (finalResult instanceof byte[]) {
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABINARY)
.serialize(new ABinary((byte[]) finalResult), out);
- } else if (finalResult instanceof OGCGeometry) {
+ } else if (finalResult instanceof Geometry) {
out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
- AGeometrySerializerDeserializer.INSTANCE.serialize(new AGeometry((OGCGeometry) finalResult), out);
+ AGeometrySerializerDeserializer.INSTANCE.serialize((Geometry) finalResult, out);
} else if (finalResult instanceof ARectangle) {
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ARECTANGLE)
.serialize(finalResult, out);
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/ParseGeoJSONDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/ParseGeoJSONDescriptor.java
index 425a198..8df57e4 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/ParseGeoJSONDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/ParseGeoJSONDescriptor.java
@@ -23,9 +23,10 @@
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
-import java.nio.ByteBuffer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.JtsModule;
import org.apache.asterix.geo.evaluators.GeoFunctionTypeInferers;
import org.apache.asterix.om.base.AOrderedList;
import org.apache.asterix.om.base.ARecord;
@@ -48,10 +49,9 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+import org.locationtech.jts.geom.Geometry;
-import com.esri.core.geometry.MapOGCStructure;
-import com.esri.core.geometry.OperatorImportFromGeoJson;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import com.fasterxml.jackson.databind.ObjectMapper;
public class ParseGeoJSONDescriptor extends AbstractScalarFunctionDynamicDescriptor {
@@ -98,7 +98,7 @@
private DataOutput out;
private IPointable inputArg;
private IScalarEvaluator eval;
- private OperatorImportFromGeoJson geoJsonImporter;
+ private ObjectMapper objectMapper;
public ParseGeoJSONEvaluator(IScalarEvaluatorFactory factory, IEvaluatorContext ctx)
throws HyracksDataException {
@@ -106,7 +106,8 @@
out = resultStorage.getDataOutput();
inputArg = new VoidPointable();
eval = factory.createScalarEvaluator(ctx);
- geoJsonImporter = OperatorImportFromGeoJson.local();
+ objectMapper = new ObjectMapper();
+ objectMapper.registerModule(new JtsModule());
}
@Override
@@ -123,15 +124,10 @@
ByteArrayInputStream inStream = new ByteArrayInputStream(data, offset + 1, len - 1);
DataInput dataIn = new DataInputStream(inStream);
try {
- String geometry = recordToString(new ARecordSerializerDeserializer(recType).deserialize(dataIn));
- MapOGCStructure structure = geoJsonImporter.executeOGC(0, geometry, null);
- OGCGeometry ogcGeometry =
- OGCGeometry.createFromOGCStructure(structure.m_ogcStructure, structure.m_spatialReference);
- ByteBuffer buffer = ogcGeometry.asBinary();
- byte[] wKBGeometryBuffer = buffer.array();
+ String geometryGeoJSON = recordToString(new ARecordSerializerDeserializer(recType).deserialize(dataIn));
+ Geometry geometry = objectMapper.readValue(geometryGeoJSON, Geometry.class);
out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
- out.writeInt(wKBGeometryBuffer.length);
- out.write(wKBGeometryBuffer);
+ AGeometrySerializerDeserializer.INSTANCE.serialize(geometry, out);
result.set(resultStorage);
} catch (IOException e) {
throw new InvalidDataFormatException(sourceLoc, getIdentifier(), e,
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAreaDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAreaDescriptor.java
index a8d983c..c10bbf2 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAreaDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAreaDescriptor.java
@@ -21,12 +21,10 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptor;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.GeometryCursor;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STAreaDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -39,18 +37,15 @@
};
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- double area;
- if (!"GeometryCollection".equals(geometry.geometryType())) {
- area = geometry.getEsriGeometry().calculateArea2D();
- } else {
- GeometryCursor cursor = geometry.getEsriGeometryCursor();
- Geometry geometry1 = cursor.next();
- area = 0;
- while (geometry1 != null) {
- area += geometry1.calculateArea2D();
- geometry1 = cursor.next();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ double area = 0;
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_GEOMETRYCOLLECTION)) {
+ for (int i = 0; i < geometry.getNumGeometries(); i++) {
+ Geometry subGeom = geometry.getGeometryN(i);
+ area += subGeom.getArea();
}
+ } else {
+ area = geometry.getArea();
}
return area;
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsBinaryDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsBinaryDescriptor.java
index e0e43bb..9ee77c1 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsBinaryDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsBinaryDescriptor.java
@@ -18,12 +18,13 @@
*/
package org.apache.asterix.geo.evaluators.functions;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoFunctionUtils;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.WKBWriter;
public class STAsBinaryDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,8 +32,10 @@
public static final IFunctionDescriptorFactory FACTORY = STAsBinaryDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- return geometry.asBinary().array();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ WKBWriter wkbWriter = new WKBWriter(GeoFunctionUtils.getCoordinateDimension(geometry),
+ GeoFunctionUtils.LITTLE_ENDIAN_BYTEORDER);
+ return wkbWriter.write(geometry);
}
@Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsGeoJSONDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsGeoJSONDescriptor.java
index 760261d..f5d2217 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsGeoJSONDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsGeoJSONDescriptor.java
@@ -18,21 +18,41 @@
*/
package org.apache.asterix.geo.evaluators.functions;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.JtsModule;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.locationtech.jts.geom.Geometry;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
public class STAsGeoJSONDescriptor extends AbstractSTSingleGeometryDescriptor {
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = STAsGeoJSONDescriptor::new;
+ private static final Logger LOGGER = LogManager.getLogger();
+ private final ObjectMapper mapper;
+
+ public STAsGeoJSONDescriptor() {
+ mapper = new ObjectMapper();
+ }
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- return geometry.asGeoJson();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ String geoJson = null;
+ try {
+ mapper.registerModule(new JtsModule());
+ geoJson = mapper.writeValueAsString(geometry);
+ } catch (JsonProcessingException e) {
+ LOGGER.debug("JSON Processing exception during STAsGeoJSON function");
+ throw HyracksDataException.create(ErrorCode.PARSING_ERROR);
+ }
+ return geoJson;
}
@Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsTextDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsTextDescriptor.java
index 12bdb30..372c73e 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsTextDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STAsTextDescriptor.java
@@ -18,12 +18,13 @@
*/
package org.apache.asterix.geo.evaluators.functions;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoFunctionUtils;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.WKTWriter;
public class STAsTextDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,8 +32,13 @@
public static final IFunctionDescriptorFactory FACTORY = STAsTextDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- return geometry.asText();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ try {
+ WKTWriter wktWriter = new WKTWriter(GeoFunctionUtils.getCoordinateDimension(geometry));
+ return wktWriter.write(geometry);
+ } catch (Exception e) {
+ throw new HyracksDataException("Failed to convert geometry to WKT: " + e.getMessage(), e);
+ }
}
@Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STBoundaryDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STBoundaryDescriptor.java
index 9bd6ba1..5d6abaf 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STBoundaryDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STBoundaryDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STBoundaryDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,8 +30,8 @@
public static final IFunctionDescriptorFactory FACTORY = STBoundaryDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- return geometry.boundary();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ return geometry.getBoundary();
}
@Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STContainsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STContainsDescriptor.java
index 34c21bf..e0461ff 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STContainsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STContainsDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STContainsDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STContainsDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.contains(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCoordDimDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCoordDimDescriptor.java
index a0c10f7..da8ee66 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCoordDimDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCoordDimDescriptor.java
@@ -18,12 +18,12 @@
*/
package org.apache.asterix.geo.evaluators.functions;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoFunctionUtils;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STCoordDimDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,8 +31,8 @@
public static final IFunctionDescriptorFactory FACTORY = STCoordDimDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- return geometry.coordinateDimension();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ return GeoFunctionUtils.getCoordinateDimension(geometry);
}
@Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCrossesDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCrossesDescriptor.java
index c276e7e..f868354 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCrossesDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STCrossesDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STCrossesDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STCrossesDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.crosses(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDifferenceDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDifferenceDescriptor.java
index 8a49d4b..3ea165f 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDifferenceDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDifferenceDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STDifferenceDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STDifferenceDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.difference(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDimensionDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDimensionDescriptor.java
index 34d7282..38683a7 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDimensionDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDimensionDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STDimensionDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,8 +30,8 @@
public static final IFunctionDescriptorFactory FACTORY = STDimensionDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- return geometry.dimension();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ return geometry.getDimension();
}
@Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDisjointDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDisjointDescriptor.java
index 0a890b9..5317f49 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDisjointDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDisjointDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STDisjointDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STDisjointDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.disjoint(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDistanceDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDistanceDescriptor.java
index 75b8b41..b68a6af 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDistanceDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STDistanceDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STDistanceDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STDistanceDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.distance(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEndPointDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEndPointDescriptor.java
index ab0ba5c..36bc258 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEndPointDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEndPointDescriptor.java
@@ -20,11 +20,11 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCCurve;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.LineString;
public class STEndPointDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -32,12 +32,12 @@
public static final IFunctionDescriptorFactory FACTORY = STEndPointDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCCurve) {
- return ((OGCCurve) geometry).endPoint();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_LINESTRING)) {
+ return ((LineString) geometry).getEndPoint();
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEnvelopeDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEnvelopeDescriptor.java
index 13555c6..2ca68fd 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEnvelopeDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEnvelopeDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STEnvelopeDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,8 +30,8 @@
public static final IFunctionDescriptorFactory FACTORY = STEnvelopeDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- return geometry.envelope();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ return geometry.getEnvelope();
}
@Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEqualsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEqualsDescriptor.java
index e0c0cf8..c40ee62 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEqualsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STEqualsDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STEqualsDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STEqualsDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.equals(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STExteriorRingDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STExteriorRingDescriptor.java
index 5888b63..71cb2b7 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STExteriorRingDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STExteriorRingDescriptor.java
@@ -20,11 +20,11 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPolygon;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Polygon;
public class STExteriorRingDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -32,12 +32,12 @@
public static final IFunctionDescriptorFactory FACTORY = STExteriorRingDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCPolygon) {
- return ((OGCPolygon) geometry).exteriorRing();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POLYGON)) {
+ return ((Polygon) geometry).getExteriorRing();
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextDescriptor.java
index 73272ae..fc097ce 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextDescriptor.java
@@ -22,8 +22,8 @@
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
-import java.nio.ByteBuffer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
@@ -40,12 +40,9 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.OGCStructure;
-import com.esri.core.geometry.OperatorImportFromWkt;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.WktImportFlags;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKTReader;
public class STGeomFromTextDescriptor extends AbstractScalarFunctionDynamicDescriptor {
@@ -77,7 +74,6 @@
private DataOutput out;
private IPointable inputArg;
private IScalarEvaluator eval;
- private OperatorImportFromWkt wktImporter;
public STGeomFromTextEvaluator(IScalarEvaluatorFactory[] args, IEvaluatorContext ctx)
throws HyracksDataException {
@@ -85,7 +81,6 @@
out = resultStorage.getDataOutput();
inputArg = new VoidPointable();
eval = args[0].createScalarEvaluator(ctx);
- wktImporter = OperatorImportFromWkt.local();
}
@Override
@@ -102,19 +97,13 @@
ByteArrayInputStream inStream = new ByteArrayInputStream(data, offset + 1, len - 1);
DataInputStream dataIn = new DataInputStream(inStream);
try {
- String geometry = AStringSerializerDeserializer.INSTANCE.deserialize(dataIn).getStringValue();
- OGCStructure structure;
-
- structure = wktImporter.executeOGC(WktImportFlags.wktImportNonTrusted, geometry, null);
- OGCGeometry ogcGeometry = OGCGeometry.createFromOGCStructure(structure, SpatialReference.create(4326));
- ByteBuffer buffer = ogcGeometry.asBinary();
- byte[] wKBGeometryBuffer = buffer.array();
+ WKTReader reader = new WKTReader();
+ String wktString = AStringSerializerDeserializer.INSTANCE.deserialize(dataIn).getStringValue();
+ Geometry geometry = reader.read(wktString);
out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
- out.writeInt(wKBGeometryBuffer.length);
- out.write(wKBGeometryBuffer);
+ AGeometrySerializerDeserializer.INSTANCE.serialize(geometry, out);
result.set(resultStorage);
-
- } catch (IOException e) {
+ } catch (ParseException | IOException e) {
throw new InvalidDataFormatException(sourceLoc, getIdentifier(), e,
ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextSRIDDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextSRIDDescriptor.java
index aca726e..3a3dddb 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextSRIDDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomFromTextSRIDDescriptor.java
@@ -22,9 +22,8 @@
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
-import java.nio.ByteBuffer;
-import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
@@ -41,12 +40,9 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.OGCStructure;
-import com.esri.core.geometry.OperatorImportFromWkt;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.WktImportFlags;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKTReader;
public class STGeomFromTextSRIDDescriptor extends AbstractScalarFunctionDynamicDescriptor {
@@ -80,7 +76,6 @@
private IScalarEvaluator eval;
private IPointable inputArg0;
private IScalarEvaluator eval0;
- private OperatorImportFromWkt wktImporter;
public STGeomFromTextSRIDEvaluator(IScalarEvaluatorFactory[] args, IEvaluatorContext ctx)
throws HyracksDataException {
@@ -90,7 +85,6 @@
eval = args[0].createScalarEvaluator(ctx);
inputArg0 = new VoidPointable();
eval0 = args[1].createScalarEvaluator(ctx);
- wktImporter = OperatorImportFromWkt.local();
}
@Override
@@ -116,20 +110,13 @@
ByteArrayInputStream inStream = new ByteArrayInputStream(data, offset + 1, len - 1);
DataInputStream dataIn = new DataInputStream(inStream);
try {
- String geometry = AStringSerializerDeserializer.INSTANCE.deserialize(dataIn).getStringValue();
- int srid = (int) AInt64SerializerDeserializer.getLong(data0, offset0 + 1);
- OGCStructure structure;
-
- structure = wktImporter.executeOGC(WktImportFlags.wktImportNonTrusted, geometry, null);
- OGCGeometry ogcGeometry = OGCGeometry.createFromOGCStructure(structure, SpatialReference.create(srid));
- ByteBuffer buffer = ogcGeometry.asBinary();
- byte[] wKBGeometryBuffer = buffer.array();
+ WKTReader reader = new WKTReader();
+ String wktString = AStringSerializerDeserializer.INSTANCE.deserialize(dataIn).getStringValue();
+ Geometry geometry = reader.read(wktString);
out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
- out.writeInt(wKBGeometryBuffer.length);
- out.write(wKBGeometryBuffer);
+ AGeometrySerializerDeserializer.INSTANCE.serialize(geometry, out);
result.set(resultStorage);
-
- } catch (IOException e) {
+ } catch (ParseException | IOException e) {
throw new InvalidDataFormatException(sourceLoc, getIdentifier(), e,
ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomentryTypeDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomentryTypeDescriptor.java
index 8a432d1..96e5931 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomentryTypeDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeomentryTypeDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STGeomentryTypeDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,8 +30,8 @@
public static final IFunctionDescriptorFactory FACTORY = STGeomentryTypeDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- return geometry.geometryType();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ return geometry.getGeometryType();
}
@Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeometryNDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeometryNDescriptor.java
index 5d9642e..4c8cc9a 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeometryNDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STGeometryNDescriptor.java
@@ -20,11 +20,10 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCGeometryCollection;
+import org.locationtech.jts.geom.Geometry;
public class STGeometryNDescriptor extends AbstractSTGeometryNDescriptor {
@@ -38,12 +37,12 @@
}
@Override
- protected OGCGeometry evaluateOGCGeometry(OGCGeometry geometry, int n) throws HyracksDataException {
- if (geometry instanceof OGCGeometryCollection) {
- return ((OGCGeometryCollection) geometry).geometryN(n);
+ protected Geometry evaluateOGCGeometry(Geometry geometry, int n) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_GEOMETRYCOLLECTION)) {
+ return geometry.getGeometryN(n);
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STInteriorRingNDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STInteriorRingNDescriptor.java
index 5f6d956..9dc8bd9 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STInteriorRingNDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STInteriorRingNDescriptor.java
@@ -20,11 +20,11 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPolygon;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Polygon;
public class STInteriorRingNDescriptor extends AbstractSTGeometryNDescriptor {
@@ -38,12 +38,12 @@
}
@Override
- protected OGCGeometry evaluateOGCGeometry(OGCGeometry geometry, int n) throws HyracksDataException {
- if (geometry instanceof OGCPolygon) {
- return ((OGCPolygon) geometry).interiorRingN(n);
+ protected Geometry evaluateOGCGeometry(Geometry geometry, int n) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POLYGON)) {
+ return ((Polygon) geometry).getInteriorRingN(n);
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectionDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectionDescriptor.java
index 5a3d2bf..6888b2b 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectionDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectionDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STIntersectionDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STIntersectionDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.intersection(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectsDescriptor.java
index 34a1407..a8a4b78 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIntersectsDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STIntersectsDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STIntersectsDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.intersects(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsClosedDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsClosedDescriptor.java
index ff55f9f..cedda05 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsClosedDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsClosedDescriptor.java
@@ -22,15 +22,10 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCCurve;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCGeometryCollection;
-import com.esri.core.geometry.ogc.OGCMultiCurve;
-import com.esri.core.geometry.ogc.OGCMultiPoint;
-import com.esri.core.geometry.ogc.OGCMultiPolygon;
-import com.esri.core.geometry.ogc.OGCPoint;
-import com.esri.core.geometry.ogc.OGCPolygon;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryCollection;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.MultiLineString;
public class STIsClosedDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -38,30 +33,36 @@
public static final IFunctionDescriptorFactory FACTORY = STIsClosedDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
return isClosed(geometry);
}
- private boolean isClosed(OGCGeometry geometry) {
- if (geometry instanceof OGCCurve) {
- return ((OGCCurve) geometry).isClosed();
- } else if (geometry instanceof OGCMultiCurve) {
- return ((OGCMultiCurve) geometry).isClosed();
- } else if (geometry instanceof OGCMultiPoint || geometry instanceof OGCMultiPolygon
- || geometry instanceof OGCPolygon || geometry instanceof OGCPoint) {
- return true;
- } else if (geometry instanceof OGCGeometryCollection) {
- OGCGeometryCollection geometryCollection = (OGCGeometryCollection) geometry;
- int num = geometryCollection.numGeometries();
- for (int i = 0; i < num; ++i) {
- if (!isClosed(geometryCollection.geometryN(i))) {
- return false;
+ private boolean isClosed(Geometry geometry) {
+ switch (geometry.getGeometryType()) {
+ case Geometry.TYPENAME_LINESTRING:
+ return ((LineString) geometry).isClosed();
+
+ case Geometry.TYPENAME_MULTILINESTRING:
+ return ((MultiLineString) geometry).isClosed();
+
+ case Geometry.TYPENAME_MULTIPOINT:
+ case Geometry.TYPENAME_MULTIPOLYGON:
+ case Geometry.TYPENAME_POLYGON:
+ case Geometry.TYPENAME_POINT:
+ return true;
+
+ case Geometry.TYPENAME_GEOMETRYCOLLECTION:
+ GeometryCollection geometryCollection = (GeometryCollection) geometry;
+ int num = geometryCollection.getNumGeometries();
+ for (int i = 0; i < num; ++i) {
+ if (!isClosed(geometryCollection.getGeometryN(i))) {
+ return false;
+ }
}
- }
- return true;
- } else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ return true;
+
+ default:
+ throw new IllegalArgumentException("Unsupported geometry type: " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsCollectionDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsCollectionDescriptor.java
index 544fdf3..f6a8578 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsCollectionDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsCollectionDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STIsCollectionDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,8 +30,8 @@
public static final IFunctionDescriptorFactory FACTORY = STIsCollectionDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- String type = geometry.geometryType();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ String type = geometry.getGeometryType();
return "GeometryCollection".equals(type) || "MultiLineString".equals(type) || "MultiPoint".equals(type)
|| "MultiPolygon".equals(type);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsEmptyDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsEmptyDescriptor.java
index 1adcb0e..569806c 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsEmptyDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsEmptyDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STIsEmptyDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STIsEmptyDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
return geometry.isEmpty();
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsRingDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsRingDescriptor.java
index 02ea049a..0c84c1c 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsRingDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsRingDescriptor.java
@@ -20,11 +20,11 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCCurve;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.LineString;
public class STIsRingDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -32,12 +32,12 @@
public static final IFunctionDescriptorFactory FACTORY = STIsRingDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCCurve) {
- return ((OGCCurve) geometry).isRing();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_LINESTRING)) {
+ return ((LineString) geometry).isRing();
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsSimpleDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsSimpleDescriptor.java
index b8b2d63..2b6c468 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsSimpleDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STIsSimpleDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STIsSimpleDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STIsSimpleDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
return geometry.isSimple();
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLengthDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLengthDescriptor.java
index 7357813..8909ed3 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLengthDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLengthDescriptor.java
@@ -20,14 +20,12 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.GeometryCursor;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCLineString;
-import com.esri.core.geometry.ogc.OGCMultiLineString;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.MultiLineString;
public class STLengthDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -35,21 +33,20 @@
public static final IFunctionDescriptorFactory FACTORY = STLengthDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCLineString) {
- return geometry.getEsriGeometry().calculateLength2D();
- } else if (geometry instanceof OGCMultiLineString) {
- GeometryCursor cursor = geometry.getEsriGeometryCursor();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_LINESTRING)) {
+ return geometry.getLength();
+ } else if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_MULTILINESTRING)) {
double length = 0;
- Geometry geometry1 = cursor.next();
- while (geometry1 != null) {
- length += geometry1.calculateLength2D();
- geometry1 = cursor.next();
+ MultiLineString multiLine = (MultiLineString) geometry;
+ for (int i = 0; i < multiLine.getNumGeometries(); i++) {
+ LineString lineString = (LineString) multiLine.getGeometryN(i);
+ length += lineString.getLength();
}
return length;
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLineFromMultiPointDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLineFromMultiPointDescriptor.java
index b716b5c..82e1c3d 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLineFromMultiPointDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STLineFromMultiPointDescriptor.java
@@ -20,35 +20,42 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.Polyline;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCLineString;
-import com.esri.core.geometry.ogc.OGCMultiPoint;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.MultiPoint;
public class STLineFromMultiPointDescriptor extends AbstractSTSingleGeometryDescriptor {
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = STLineFromMultiPointDescriptor::new;
+ private final GeometryFactory geometryFactory;
+
+ public STLineFromMultiPointDescriptor() {
+ geometryFactory = new GeometryFactory();
+ }
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCMultiPoint) {
- Polyline polyline = new Polyline();
- OGCMultiPoint multiPoint = (OGCMultiPoint) geometry;
- int numPoints = multiPoint.numGeometries();
- polyline.startPath((Point) multiPoint.geometryN(0).getEsriGeometry());
- for (int i = 1; i < numPoints; i++) {
- polyline.lineTo((Point) multiPoint.geometryN(i).getEsriGeometry());
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_MULTIPOINT)) {
+ MultiPoint multiPoint = (MultiPoint) geometry;
+ int numPoints = multiPoint.getNumGeometries();
+
+ if (numPoints < 2) {
+ throw new UnsupportedOperationException("A LineString requires at least two points.");
}
- return new OGCLineString(polyline, 0, SpatialReference.create(4326));
+
+ Coordinate[] coordinates = new Coordinate[numPoints];
+ for (int i = 0; i < numPoints; i++) {
+ coordinates[i] = multiPoint.getGeometryN(i).getCoordinate();
+ }
+ return geometryFactory.createLineString(coordinates);
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBRDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBRDescriptor.java
index 55e358c..6353918 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBRDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBRDescriptor.java
@@ -25,9 +25,8 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
public class STMBRDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -41,14 +40,12 @@
};
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
-
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
AMutableRectangle aRectangle = new AMutableRectangle(null, null);
AMutablePoint[] aPoint = { new AMutablePoint(0, 0), new AMutablePoint(0, 0) };
- Envelope env = new Envelope();
- geometry.getEsriGeometry().queryEnvelope(env);
- aPoint[0].setValue(env.getXMin(), env.getYMin());
- aPoint[1].setValue(env.getXMax(), env.getYMax());
+ Envelope envelope = geometry.getEnvelopeInternal();
+ aPoint[0].setValue(envelope.getMinX(), envelope.getMinY());
+ aPoint[1].setValue(envelope.getMaxX(), envelope.getMaxY());
aRectangle.setValue(aPoint[0], aPoint[1]);
return aRectangle;
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBREnlargeDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBREnlargeDescriptor.java
index b3bfb33..50ddd28 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBREnlargeDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMBREnlargeDescriptor.java
@@ -46,9 +46,9 @@
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.data.std.util.ByteArrayAccessibleInputStream;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
public class STMBREnlargeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
@@ -61,6 +61,12 @@
}
};
+ private final GeometryFactory geometryFactory;
+
+ public STMBREnlargeDescriptor() {
+ geometryFactory = new GeometryFactory();
+ }
+
@Override
public FunctionIdentifier getIdentifier() {
return BuiltinFunctions.ST_MBR_ENLARGE;
@@ -114,14 +120,13 @@
}
inStream.setContent(data0, offset0 + 1, len - 1);
- OGCGeometry geometry =
- AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
- geometry.getEsriGeometry().queryEnvelope(env);
+ Geometry geometry = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
+ Envelope env = geometry.getEnvelopeInternal();
double expandValue =
ATypeHierarchy.getDoubleValue(getIdentifier().getName(), 0, data1, offset1);
AMutableRectangle expandedMBR = new AMutableRectangle(
- new AMutablePoint(env.getXMin() - expandValue, env.getYMin() - expandValue),
- new AMutablePoint(env.getXMax() + expandValue, env.getYMax() + expandValue));
+ new AMutablePoint(env.getMinX() - expandValue, env.getMinY() - expandValue),
+ new AMutablePoint(env.getMaxX() + expandValue, env.getMaxY() + expandValue));
rectangleSerde.serialize(expandedMBR, out);
result.set(resultStorage);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMDescriptor.java
index e208771..71fcacf 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMDescriptor.java
@@ -20,24 +20,36 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.CoordinateXYZM;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Point;
public class STMDescriptor extends AbstractSTSingleGeometryDescriptor {
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = STMDescriptor::new;
+ private static final Logger LOGGER = LogManager.getLogger();
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCPoint) {
- return ((OGCPoint) geometry).M();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POINT)) {
+ Point point = (Point) geometry;
+ Coordinate coordinate = point.getCoordinate();
+ if (coordinate instanceof CoordinateXYZM) {
+ return coordinate.getM();
+ } else {
+ LOGGER.debug("The provided point does not have an M value.");
+ return Double.NaN;
+ }
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakeEnvelopeDescriptorSRID.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakeEnvelopeDescriptorSRID.java
index 3aa9e7b..f8cc270 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakeEnvelopeDescriptorSRID.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakeEnvelopeDescriptorSRID.java
@@ -20,8 +20,8 @@
import java.io.DataOutput;
import java.io.IOException;
-import java.nio.ByteBuffer;
+import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.asterix.om.types.ATypeTag;
@@ -35,10 +35,9 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.Polygon;
public class STMakeEnvelopeDescriptorSRID extends AbstractGetValDescriptor {
@@ -70,14 +69,19 @@
private DataOutput out;
private IPointable inputArg0;
private IScalarEvaluator eval0;
+ private Coordinate coordinate0;
private IPointable inputArg1;
private IScalarEvaluator eval1;
+ private Coordinate coordinate1;
private IPointable inputArg2;
private IScalarEvaluator eval2;
+ private Coordinate coordinate2;
private IPointable inputArg3;
private IScalarEvaluator eval3;
+ private Coordinate coordinate3;
private IPointable inputArg4;
private IScalarEvaluator eval4;
+ private final GeometryFactory geometryFactory;
public STMakeEnvelopeEvaluator(IScalarEvaluatorFactory[] args, IEvaluatorContext ctx)
throws HyracksDataException {
@@ -93,6 +97,11 @@
eval3 = args[3].createScalarEvaluator(ctx);
inputArg4 = new VoidPointable();
eval4 = args[4].createScalarEvaluator(ctx);
+ geometryFactory = new GeometryFactory();
+ coordinate0 = new Coordinate();
+ coordinate1 = new Coordinate();
+ coordinate2 = new Coordinate();
+ coordinate3 = new Coordinate();
}
@Override
@@ -116,23 +125,30 @@
byte[] data3 = inputArg3.getByteArray();
int offset3 = inputArg3.getStartOffset();
+ // Spatial Reference System Identifier (SRID); currently unused.
eval4.evaluate(tuple, inputArg4);
byte[] data4 = inputArg4.getByteArray();
int offset4 = inputArg4.getStartOffset();
try {
+ double xmin = getVal(data0, offset0);
+ double ymin = getVal(data1, offset1);
+ double xmax = getVal(data2, offset2);
+ double ymax = getVal(data3, offset3);
+ coordinate0.setX(xmin);
+ coordinate0.setY(ymin);
+ coordinate1.setX(xmin);
+ coordinate1.setY(ymax);
+ coordinate2.setX(xmax);
+ coordinate2.setY(ymax);
+ coordinate3.setX(xmax);
+ coordinate3.setY(ymin);
- OGCGeometry ogcGeometry =
- OGCGeometry
- .createFromEsriGeometry(
- new Envelope(getVal(data0, offset0), getVal(data1, offset1),
- getVal(data2, offset2), getVal(data3, offset3)),
- SpatialReference.create((int) getVal(data4, offset4)));
- ByteBuffer buffer = ogcGeometry.asBinary();
- byte[] bytes = buffer.array();
+ Coordinate[] coords =
+ new Coordinate[] { coordinate0, coordinate1, coordinate2, coordinate3, coordinate0 };
+ Polygon polygon = geometryFactory.createPolygon(coords);
out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
- out.writeInt(bytes.length);
- out.write(bytes);
+ AGeometrySerializerDeserializer.INSTANCE.serialize(polygon, out);
result.set(resultStorage);
} catch (IOException e) {
throw new InvalidDataFormatException(sourceLoc, getIdentifier(), e,
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DDescriptor.java
index 4a5cf7e..8009510 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DDescriptor.java
@@ -22,7 +22,6 @@
import java.io.IOException;
import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
-import org.apache.asterix.om.base.AGeometry;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.asterix.om.types.ATypeTag;
@@ -35,10 +34,9 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.Point;
public class STMakePoint3DDescriptor extends AbstractGetValDescriptor {
@@ -73,7 +71,7 @@
private final IScalarEvaluator eval1;
private final IScalarEvaluator eval2;
private Point point;
- private AGeometry pointGeometry;
+ private final GeometryFactory geometryFactory;
public STMakePoint3DEvaluator(IScalarEvaluatorFactory[] args, IEvaluatorContext ctx)
throws HyracksDataException {
@@ -85,8 +83,7 @@
eval0 = args[0].createScalarEvaluator(ctx);
eval1 = args[1].createScalarEvaluator(ctx);
eval2 = args[2].createScalarEvaluator(ctx);
- point = new Point(0, 0, 0);
- pointGeometry = new AGeometry(new OGCPoint(point, SpatialReference.create(4326)));
+ geometryFactory = new GeometryFactory();
}
@Override
@@ -105,10 +102,10 @@
resultStorage.reset();
try {
out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
- point.setX(getVal(bytes0, offset0));
- point.setY(getVal(bytes1, offset1));
- point.setZ(getVal(bytes2, offset2));
- AGeometrySerializerDeserializer.INSTANCE.serialize(pointGeometry, out);
+ Coordinate coordinate =
+ new Coordinate(getVal(bytes0, offset0), getVal(bytes1, offset1), getVal(bytes2, offset2));
+ point = geometryFactory.createPoint(coordinate);
+ AGeometrySerializerDeserializer.INSTANCE.serialize(point, out);
} catch (IOException e1) {
throw HyracksDataException.create(e1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DWithMDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DWithMDescriptor.java
index 55baa37..edc0cdc 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DWithMDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePoint3DWithMDescriptor.java
@@ -22,7 +22,6 @@
import java.io.IOException;
import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
-import org.apache.asterix.om.base.AGeometry;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.asterix.om.types.ATypeTag;
@@ -35,11 +34,15 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+import org.locationtech.jts.geom.CoordinateXYZM;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.Point;
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCPoint;
-
+/**
+ * TODO: Support writing geometry with 4 dimensions
+ * JTS WKBWriter currently does not support writing 4 dimensions and loses the detail.
+ * See https://github.com/locationtech/jts/issues/733 for more details.
+ */
public class STMakePoint3DWithMDescriptor extends AbstractGetValDescriptor {
private static final long serialVersionUID = 1L;
@@ -75,7 +78,7 @@
private final IScalarEvaluator eval2;
private final IScalarEvaluator eval3;
private Point point;
- private AGeometry pointGeometry;
+ private final GeometryFactory geometryFactory;
public STMakePoint3DWithMEvaluator(IScalarEvaluatorFactory[] args, IEvaluatorContext ctx)
throws HyracksDataException {
@@ -89,8 +92,7 @@
eval1 = args[1].createScalarEvaluator(ctx);
eval2 = args[2].createScalarEvaluator(ctx);
eval3 = args[3].createScalarEvaluator(ctx);
- point = new Point(0, 0, 0);
- pointGeometry = new AGeometry(new OGCPoint(point, SpatialReference.create(4326)));
+ geometryFactory = new GeometryFactory();
}
@Override
@@ -112,11 +114,10 @@
resultStorage.reset();
try {
out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
- point.setX(getVal(bytes0, offset0));
- point.setY(getVal(bytes1, offset1));
- point.setZ(getVal(bytes2, offset2));
- point.setM(getVal(bytes3, offset3));
- AGeometrySerializerDeserializer.INSTANCE.serialize(pointGeometry, out);
+ CoordinateXYZM coordinate = new CoordinateXYZM(getVal(bytes0, offset0), getVal(bytes1, offset1),
+ getVal(bytes2, offset2), getVal(bytes3, offset3));
+ point = geometryFactory.createPoint(coordinate);
+ AGeometrySerializerDeserializer.INSTANCE.serialize(point, out);
} catch (IOException e1) {
throw HyracksDataException.create(e1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePointDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePointDescriptor.java
index c3b7b36..f21b381 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePointDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STMakePointDescriptor.java
@@ -22,7 +22,6 @@
import java.io.IOException;
import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
-import org.apache.asterix.om.base.AGeometry;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.asterix.om.types.ATypeTag;
@@ -35,10 +34,9 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.Point;
public class STMakePointDescriptor extends AbstractGetValDescriptor {
@@ -71,7 +69,7 @@
private final IScalarEvaluator eval0;
private final IScalarEvaluator eval1;
private Point point;
- private AGeometry pointGeometry;
+ private final GeometryFactory geometryFactory;
public STMakePointEvaluator(IScalarEvaluatorFactory[] args, IEvaluatorContext ctx) throws HyracksDataException {
resultStorage = new ArrayBackedValueStorage();
@@ -80,8 +78,7 @@
inputArg1 = new VoidPointable();
eval0 = args[0].createScalarEvaluator(ctx);
eval1 = args[1].createScalarEvaluator(ctx);
- point = new Point(0, 0);
- pointGeometry = new AGeometry(new OGCPoint(point, SpatialReference.create(4326)));
+ geometryFactory = new GeometryFactory();
}
@Override
@@ -97,9 +94,9 @@
resultStorage.reset();
try {
out.writeByte(ATypeTag.SERIALIZED_GEOMETRY_TYPE_TAG);
- point.setX(getVal(bytes0, offset0));
- point.setY(getVal(bytes1, offset1));
- AGeometrySerializerDeserializer.INSTANCE.serialize(pointGeometry, out);
+ Coordinate coordinate = new Coordinate(getVal(bytes0, offset0), getVal(bytes1, offset1));
+ point = geometryFactory.createPoint(coordinate);
+ AGeometrySerializerDeserializer.INSTANCE.serialize(point, out);
} catch (IOException e1) {
throw HyracksDataException.create(e1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNPointsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNPointsDescriptor.java
index 309d325..c80c513 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNPointsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNPointsDescriptor.java
@@ -20,14 +20,13 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.GeometryCursor;
-import com.esri.core.geometry.MultiVertexGeometry;
-import com.esri.core.geometry.Point;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryCollection;
+import org.locationtech.jts.geom.MultiPolygon;
+import org.locationtech.jts.geom.Polygon;
public class STNPointsDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -35,31 +34,45 @@
public static final IFunctionDescriptorFactory FACTORY = STNPointsDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- Geometry esriGeometry = geometry.getEsriGeometry();
- if (esriGeometry instanceof MultiVertexGeometry) {
- return ((MultiVertexGeometry) esriGeometry).getPointCount();
- } else if (esriGeometry instanceof Point) {
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (geometry == null) {
+ return 0;
+ }
+ if (geometry.isEmpty()) {
+ return 0;
+ }
+
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POINT))
return 1;
- } else if (esriGeometry == null) {
- int count = 0;
- GeometryCursor geometryCursor = geometry.getEsriGeometryCursor();
- esriGeometry = geometryCursor.next();
- while (esriGeometry != null) {
- if (esriGeometry instanceof MultiVertexGeometry) {
- count += ((MultiVertexGeometry) esriGeometry).getPointCount();
- } else if (esriGeometry instanceof Point) {
- count += 1;
- }
- esriGeometry = geometryCursor.next();
+
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POLYGON)) {
+ Polygon polygon = (Polygon) geometry;
+ int count = polygon.getExteriorRing().getCoordinates().length - 1;
+ for (int i = 0; i < polygon.getNumInteriorRing(); i++) {
+ count += polygon.getInteriorRingN(i).getCoordinates().length - 1;
}
return count;
- } else if (geometry.isEmpty()) {
- return 0;
- } else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
}
+
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_MULTIPOLYGON)) {
+ int count = 0;
+ MultiPolygon multiPolygon = (MultiPolygon) geometry;
+ for (int i = 0; i < multiPolygon.getNumGeometries(); i++) {
+ count += (int) evaluateOGCGeometry(multiPolygon.getGeometryN(i));
+ }
+ return count;
+ }
+
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_GEOMETRYCOLLECTION)) {
+ int count = 0;
+ GeometryCollection collection = (GeometryCollection) geometry;
+ for (int i = 0; i < collection.getNumGeometries(); i++) {
+ count += (int) evaluateOGCGeometry(collection.getGeometryN(i));
+ }
+ return count;
+ }
+
+ return geometry.getCoordinates().length;
}
@Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNRingsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNRingsDescriptor.java
index f8e313d..ee21878 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNRingsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNRingsDescriptor.java
@@ -20,12 +20,12 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCMultiPolygon;
-import com.esri.core.geometry.ogc.OGCPolygon;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.MultiPolygon;
+import org.locationtech.jts.geom.Polygon;
public class STNRingsDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -33,22 +33,22 @@
public static final IFunctionDescriptorFactory FACTORY = STNRingsDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCPolygon) {
- return ((OGCPolygon) geometry).numInteriorRing() + 1;
- } else if (geometry instanceof OGCMultiPolygon) {
- OGCMultiPolygon polygon = (OGCMultiPolygon) geometry;
- int numGeometries = polygon.numGeometries();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POLYGON)) {
+ return ((Polygon) geometry).getNumInteriorRing() + 1;
+ } else if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_MULTIPOLYGON)) {
+ MultiPolygon polygon = (MultiPolygon) geometry;
+ int numGeometries = polygon.getNumGeometries();
int count = 0;
for (int i = 1; i < numGeometries + 1; i++) {
- if (polygon.geometryN(i) instanceof OGCPolygon) {
- count += ((OGCPolygon) polygon.geometryN(i)).numInteriorRing() + 1;
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POLYGON)) {
+ count += ((Polygon) polygon.getGeometryN(i)).getNumInteriorRing() + 1;
}
}
return count;
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumGeometriesDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumGeometriesDescriptor.java
index e87d1f4..938f591 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumGeometriesDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumGeometriesDescriptor.java
@@ -20,11 +20,10 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCGeometryCollection;
+import org.locationtech.jts.geom.Geometry;
public class STNumGeometriesDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -32,9 +31,9 @@
public static final IFunctionDescriptorFactory FACTORY = STNumGeometriesDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCGeometryCollection) {
- return ((OGCGeometryCollection) geometry).numGeometries();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_GEOMETRYCOLLECTION)) {
+ return geometry.getNumGeometries();
} else if (!geometry.isEmpty()) {
return 1;
} else {
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumInteriorRingsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumInteriorRingsDescriptor.java
index 19f2e11..c4f97f3 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumInteriorRingsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STNumInteriorRingsDescriptor.java
@@ -20,11 +20,11 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPolygon;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Polygon;
public class STNumInteriorRingsDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -32,12 +32,12 @@
public static final IFunctionDescriptorFactory FACTORY = STNumInteriorRingsDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCPolygon) {
- return ((OGCPolygon) geometry).numInteriorRing();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POLYGON)) {
+ return ((Polygon) geometry).getNumInteriorRing();
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STOverlapsDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STOverlapsDescriptor.java
index 61286b0..509566a 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STOverlapsDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STOverlapsDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STOverlapsDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STOverlapsDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.overlaps(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPointNDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPointNDescriptor.java
index e0495f5..1689f9d 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPointNDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPointNDescriptor.java
@@ -20,11 +20,11 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCLineString;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.LineString;
public class STPointNDescriptor extends AbstractSTGeometryNDescriptor {
@@ -38,12 +38,12 @@
}
@Override
- protected OGCGeometry evaluateOGCGeometry(OGCGeometry geometry, int n) throws HyracksDataException {
- if (geometry instanceof OGCLineString) {
- return ((OGCLineString) geometry).pointN(n);
+ protected Geometry evaluateOGCGeometry(Geometry geometry, int n) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_LINESTRING)) {
+ return ((LineString) geometry).getPointN(n);
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPolygonizeDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPolygonizeDescriptor.java
index e2fcd8a..3683e1b 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPolygonizeDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STPolygonizeDescriptor.java
@@ -49,11 +49,9 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.ogc.OGCConcreteGeometryCollection;
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCGeometryCollection;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryCollection;
+import org.locationtech.jts.geom.GeometryFactory;
public class STPolygonizeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
@@ -61,6 +59,12 @@
private static final long serialVersionUID = 1L;
+ private final GeometryFactory geometryFactory;
+
+ public STPolygonizeDescriptor() {
+ geometryFactory = new GeometryFactory();
+ }
+
@Override
public FunctionIdentifier getIdentifier() {
return BuiltinFunctions.ST_POLYGONIZE;
@@ -116,13 +120,13 @@
ByteArrayInputStream inStream = new ByteArrayInputStream(bytes, offset + 1, len - 1);
DataInputStream dataIn = new DataInputStream(inStream);
IACursor cursor = ((IACollection) serde.deserialize(dataIn)).getCursor();
- List<OGCGeometry> list = new ArrayList<>();
+ List<Geometry> list = new ArrayList<>();
while (cursor.next()) {
IAObject object = cursor.get();
list.add(((AGeometry) object).getGeometry());
}
- OGCGeometryCollection geometryCollection =
- new OGCConcreteGeometryCollection(list, SpatialReference.create(4326));
+ GeometryCollection geometryCollection =
+ geometryFactory.createGeometryCollection(list.toArray(new Geometry[0]));
try {
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AGEOMETRY)
.serialize(new AGeometry(geometryCollection), out);
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STRelateDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STRelateDescriptor.java
index c3dc132..bc9c0b6 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STRelateDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STRelateDescriptor.java
@@ -44,8 +44,7 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STRelateDescriptor extends AbstractScalarFunctionDynamicDescriptor {
@@ -128,9 +127,9 @@
DataInputStream dataIn = new DataInputStream(inStream);
String matrix = AStringSerializerDeserializer.INSTANCE.deserialize(dataIn).getStringValue();
DataInputStream dataIn0 = new DataInputStream(new ByteArrayInputStream(bytes0, offset0 + 1, len0 - 1));
- OGCGeometry geometry0 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn0).getGeometry();
+ Geometry geometry0 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn0).getGeometry();
DataInputStream dataIn1 = new DataInputStream(new ByteArrayInputStream(bytes1, offset1 + 1, len1 - 1));
- OGCGeometry geometry1 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn1).getGeometry();
+ Geometry geometry1 = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn1).getGeometry();
try {
boolean val = geometry0.relate(geometry1, matrix);
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN)
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSRIDDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSRIDDescriptor.java
index e50678a..e19f21a 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSRIDDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSRIDDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STSRIDDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,8 +30,8 @@
public static final IFunctionDescriptorFactory FACTORY = STSRIDDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- return geometry.SRID();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ return geometry.getSRID();
}
@Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STStartPointDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STStartPointDescriptor.java
index 127b0d5..71d3470 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STStartPointDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STStartPointDescriptor.java
@@ -20,11 +20,11 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCCurve;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.LineString;
public class STStartPointDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -32,12 +32,12 @@
public static final IFunctionDescriptorFactory FACTORY = STStartPointDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCCurve) {
- return ((OGCCurve) geometry).startPoint();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_LINESTRING)) {
+ return ((LineString) geometry).getStartPoint();
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSymDifferenceDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSymDifferenceDescriptor.java
index e4d0507..b04254e 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSymDifferenceDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STSymDifferenceDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STSymDifferenceDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STSymDifferenceDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.symDifference(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STTouchesDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STTouchesDescriptor.java
index ddf6550..0615f61 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STTouchesDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STTouchesDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STTouchesDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STTouchesDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.touches(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STUnionDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STUnionDescriptor.java
index 1cfea10..533a3c1 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STUnionDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STUnionDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STUnionDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STUnionDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.union(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STWithinDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STWithinDescriptor.java
index c78e48f..a3687ac 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STWithinDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STWithinDescriptor.java
@@ -22,8 +22,7 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class STWithinDescriptor extends AbstractSTDoubleGeometryDescriptor {
@@ -31,7 +30,7 @@
public static final IFunctionDescriptorFactory FACTORY = STWithinDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry0, OGCGeometry geometry1) throws HyracksDataException {
+ protected Object evaluateOGCGeometry(Geometry geometry0, Geometry geometry1) throws HyracksDataException {
return geometry0.within(geometry1);
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXDescriptor.java
index 608e55cc..2bb450f 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXDescriptor.java
@@ -20,11 +20,11 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Point;
public class STXDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -32,12 +32,12 @@
public static final IFunctionDescriptorFactory FACTORY = STXDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCPoint) {
- return ((OGCPoint) geometry).X();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POINT)) {
+ return ((Point) geometry).getX();
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMaxDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMaxDescriptor.java
index 2431f3f..1ad10ba 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMaxDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMaxDescriptor.java
@@ -22,10 +22,8 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
public class STXMaxDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -33,15 +31,13 @@
public static final IFunctionDescriptorFactory FACTORY = STXMaxDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- Geometry esriGeom = geometry.getEsriGeometry();
- if (esriGeom != null) {
- Envelope env = new Envelope();
- esriGeom.queryEnvelope(env);
- return env.getXMax();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ Envelope env = geometry.getEnvelopeInternal();
+ if (env != null) {
+ return env.getMaxX();
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMinDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMinDescriptor.java
index 66db0c6..ad47b30 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMinDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STXMinDescriptor.java
@@ -22,10 +22,8 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
public class STXMinDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -33,17 +31,14 @@
public static final IFunctionDescriptorFactory FACTORY = STXMinDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- Geometry esriGeom = geometry.getEsriGeometry();
- if (esriGeom != null) {
- Envelope env = new Envelope();
- esriGeom.queryEnvelope(env);
- return env.getXMin();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ Envelope env = geometry.getEnvelopeInternal();
+ if (env != null) {
+ return env.getMinX();
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
-
}
@Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYDescriptor.java
index 32baa0a..df5a57d 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYDescriptor.java
@@ -20,11 +20,11 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Point;
public class STYDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -32,12 +32,12 @@
public static final IFunctionDescriptorFactory FACTORY = STYDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCPoint) {
- return ((OGCPoint) geometry).Y();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POINT)) {
+ return ((Point) geometry).getY();
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMaxDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMaxDescriptor.java
index 7b82915..b5aee1f 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMaxDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMaxDescriptor.java
@@ -22,10 +22,8 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
public class STYMaxDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -33,15 +31,13 @@
public static final IFunctionDescriptorFactory FACTORY = STYMaxDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- Geometry esriGeom = geometry.getEsriGeometry();
- if (esriGeom != null) {
- Envelope env = new Envelope();
- esriGeom.queryEnvelope(env);
- return env.getYMax();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ Envelope env = geometry.getEnvelopeInternal();
+ if (env != null) {
+ return env.getMaxY();
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMinDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMinDescriptor.java
index 1cc5e88..921761f 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMinDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STYMinDescriptor.java
@@ -22,10 +22,8 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.Envelope;
-import com.esri.core.geometry.Geometry;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
public class STYMinDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -33,15 +31,13 @@
public static final IFunctionDescriptorFactory FACTORY = STYMinDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- Geometry esriGeom = geometry.getEsriGeometry();
- if (esriGeom != null) {
- Envelope env = new Envelope();
- esriGeom.queryEnvelope(env);
- return env.getYMin();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ Envelope env = geometry.getEnvelopeInternal();
+ if (env != null) {
+ return env.getMinY();
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZDescriptor.java
index 739775a..eae150c 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZDescriptor.java
@@ -20,24 +20,32 @@
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
-import com.esri.core.geometry.ogc.OGCPoint;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.Point;
public class STZDescriptor extends AbstractSTSingleGeometryDescriptor {
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = STZDescriptor::new;
+ private static final Logger LOGGER = LogManager.getLogger();
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- if (geometry instanceof OGCPoint) {
- return ((OGCPoint) geometry).Z();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ if (StringUtils.equals(geometry.getGeometryType(), Geometry.TYPENAME_POINT)) {
+ Point point = (Point) geometry;
+ double z = point.getCoordinate().getZ();
+ if (Double.isNaN(z)) {
+ LOGGER.debug("No Z-coordinate available for the given point.");
+ }
+ return z;
} else {
- throw new UnsupportedOperationException(
- "The operation " + getIdentifier() + " is not supported for the type " + geometry.geometryType());
+ throw new UnsupportedOperationException("The operation " + getIdentifier()
+ + " is not supported for the type " + geometry.getGeometryType());
}
}
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMaxDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMaxDescriptor.java
index cb3f54e..fa80241 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMaxDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMaxDescriptor.java
@@ -22,8 +22,8 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
public class STZMaxDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,8 +31,18 @@
public static final IFunctionDescriptorFactory FACTORY = STZMaxDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- return geometry.MaxZ();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ double maxZ = Double.NEGATIVE_INFINITY;
+ boolean foundZ = false;
+ for (Coordinate coord : geometry.getCoordinates()) {
+ if (!Double.isNaN(coord.getZ())) {
+ foundZ = true;
+ if (coord.getZ() > maxZ) {
+ maxZ = coord.getZ();
+ }
+ }
+ }
+ return foundZ ? maxZ : Double.NaN;
}
@Override
diff --git a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMinDescriptor.java b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMinDescriptor.java
index 3f6f6b3..ba637d4 100644
--- a/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMinDescriptor.java
+++ b/asterixdb/asterix-geo/src/main/java/org/apache/asterix/geo/evaluators/functions/STZMinDescriptor.java
@@ -22,8 +22,8 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.Geometry;
public class STZMinDescriptor extends AbstractSTSingleGeometryDescriptor {
@@ -31,8 +31,19 @@
public static final IFunctionDescriptorFactory FACTORY = STZMinDescriptor::new;
@Override
- protected Object evaluateOGCGeometry(OGCGeometry geometry) throws HyracksDataException {
- return geometry.MinZ();
+ protected Object evaluateOGCGeometry(Geometry geometry) throws HyracksDataException {
+ double minZ = Double.POSITIVE_INFINITY;
+ boolean foundZ = false;
+ for (Coordinate coord : geometry.getCoordinates()) {
+ if (!Double.isNaN(coord.getZ())) {
+ foundZ = true;
+ if (coord.getZ() < minZ) {
+ minZ = coord.getZ();
+ }
+ }
+ }
+
+ return foundZ ? minZ : Double.NaN;
}
@Override
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CopyToStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CopyToStatement.java
index 599d528..5bc4a71 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CopyToStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CopyToStatement.java
@@ -32,6 +32,8 @@
import org.apache.asterix.lang.common.base.IReturningStatement;
import org.apache.asterix.lang.common.clause.OrderbyClause;
import org.apache.asterix.lang.common.expression.LiteralExpr;
+import org.apache.asterix.lang.common.expression.RecordTypeDefinition;
+import org.apache.asterix.lang.common.expression.TypeExpression;
import org.apache.asterix.lang.common.expression.VariableExpr;
import org.apache.asterix.lang.common.literal.StringLiteral;
import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
@@ -52,13 +54,14 @@
private List<Expression> partitionExpressions;
private List<Expression> orderByList;
private int varCounter;
+ private RecordTypeDefinition itemType;
public CopyToStatement(Namespace namespace, String datasetName, Query query, VariableExpr sourceVariable,
ExternalDetailsDecl externalDetailsDecl, int varCounter, List<Expression> keyExpressions,
boolean autogenerated) {
this(namespace, datasetName, query, sourceVariable, externalDetailsDecl, new ArrayList<>(), new ArrayList<>(),
new HashMap<>(), new ArrayList<>(), new ArrayList<>(), new ArrayList<>(), varCounter, keyExpressions,
- autogenerated);
+ autogenerated, null);
}
public CopyToStatement(Namespace namespace, String datasetName, Query query, VariableExpr sourceVariable,
@@ -68,7 +71,18 @@
List<OrderbyClause.NullOrderModifier> orderByNullModifierList, int varCounter) {
this(namespace, datasetName, query, sourceVariable, externalDetailsDecl, pathExpressions, partitionExpressions,
partitionsVariables, orderbyList, orderByModifiers, orderByNullModifierList, varCounter,
- new ArrayList<>(), false);
+ new ArrayList<>(), false, null);
+ }
+
+ public CopyToStatement(Namespace namespace, String datasetName, Query query, VariableExpr sourceVariable,
+ ExternalDetailsDecl externalDetailsDecl, List<Expression> pathExpressions,
+ List<Expression> partitionExpressions, Map<Integer, VariableExpr> partitionsVariables,
+ List<Expression> orderbyList, List<OrderbyClause.OrderModifier> orderByModifiers,
+ List<OrderbyClause.NullOrderModifier> orderByNullModifierList, int varCounter,
+ RecordTypeDefinition itemType) {
+ this(namespace, datasetName, query, sourceVariable, externalDetailsDecl, pathExpressions, partitionExpressions,
+ partitionsVariables, orderbyList, orderByModifiers, orderByNullModifierList, varCounter,
+ new ArrayList<>(), false, itemType);
}
private CopyToStatement(Namespace namespace, String datasetName, Query query, VariableExpr sourceVariable,
@@ -76,7 +90,7 @@
List<Expression> partitionExpressions, Map<Integer, VariableExpr> partitionsVariables,
List<Expression> orderbyList, List<OrderbyClause.OrderModifier> orderByModifiers,
List<OrderbyClause.NullOrderModifier> orderByNullModifierList, int varCounter,
- List<Expression> keyExpressions, boolean autogenerated) {
+ List<Expression> keyExpressions, boolean autogenerated, RecordTypeDefinition itemType) {
this.namespace = namespace;
this.datasetName = datasetName;
this.query = query;
@@ -91,6 +105,7 @@
this.varCounter = varCounter;
this.keyExpressions = keyExpressions != null ? keyExpressions : new ArrayList<>();
this.autogenerated = autogenerated;
+ this.itemType = itemType;
if (pathExpressions.isEmpty()) {
// Ensure path expressions to have at least an empty string
@@ -117,6 +132,10 @@
this.namespace = namespace;
}
+ public RecordTypeDefinition getType() {
+ return itemType;
+ }
+
public Namespace getNamespace() {
return namespace;
}
@@ -192,6 +211,10 @@
return !orderByList.isEmpty();
}
+ public TypeExpression getItemType() {
+ return itemType;
+ }
+
@Override
public int getVarCounter() {
return varCounter;
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj b/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
index da5f0ab..1cfa892 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
+++ b/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
@@ -2936,6 +2936,7 @@
Namespace namespace = nameComponents == null ? null : nameComponents.first;
String datasetName = nameComponents == null ? null : nameComponents.second.getValue();
List<Expression> pathExprs;
+ RecordTypeDefinition typeExpr = null;
List<Expression> partitionExprs = new ArrayList<Expression>();
Map<Integer, VariableExpr> partitionVarExprs = new HashMap<Integer, VariableExpr>();
@@ -2947,6 +2948,7 @@
<TO> adapterName = AdapterName()
<PATH> <LEFTPAREN> pathExprs = ExpressionList() <RIGHTPAREN>
(CopyToOverClause(partitionExprs, partitionVarExprs, orderbyList, orderbyModifierList, orderbyNullModifierList))?
+ (<TYPE> <LEFTPAREN> typeExpr = RecordTypeDef() <RIGHTPAREN>)?
<WITH> withRecord = RecordConstructor()
{
ExternalDetailsDecl edd = new ExternalDetailsDecl();
@@ -2961,8 +2963,7 @@
usedAlias = new VariableExpr(SqlppVariableUtil.toInternalVariableIdentifier(datasetName));
}
- CopyToStatement stmt = new CopyToStatement(namespace, datasetName, query, usedAlias, edd, pathExprs,
- partitionExprs, partitionVarExprs, orderbyList, orderbyModifierList, orderbyNullModifierList, getVarCounter());
+ CopyToStatement stmt = new CopyToStatement(namespace, datasetName, query, usedAlias, edd, pathExprs, partitionExprs, partitionVarExprs, orderbyList, orderbyModifierList, orderbyNullModifierList, getVarCounter(), typeExpr);
return addSourceLocation(stmt, startToken);
}
}
@@ -4407,7 +4408,7 @@
(
<MUL> { star = true; }
|
- ( expr1 = Expression() ( <COLON> { slice = true; } ( expr2 = Expression() )? )? )
+ ( (expr1 = Expression()) ? ( <COLON> { slice = true; } ( expr2 = Expression() )? )? )
)
<RIGHTBRACKET>
{
@@ -4418,10 +4419,14 @@
ensureIntegerLiteral( (LiteralExpr) expr2, "Index");
}
AbstractAccessor resultAccessor;
- if (slice) {
+
+ if (star) {
+ resultAccessor = new IndexAccessor(inputExpr, IndexAccessor.IndexKind.STAR, null);
+ } else if (slice || expr1 == null) {
+ if (expr1 == null) {
+ expr1 = new LiteralExpr(new LongIntegerLiteral(0L));
+ }
resultAccessor = new ListSliceExpression(inputExpr, expr1, expr2);
- } else if (star) {
- resultAccessor = new IndexAccessor(inputExpr, IndexAccessor.IndexKind.STAR, null);
} else {
resultAccessor = new IndexAccessor(inputExpr, IndexAccessor.IndexKind.ELEMENT, expr1);
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
index aeb6d13..76b8a0b 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
@@ -178,13 +178,13 @@
}
public void dropDatabase(String databaseName) {
- Database database = new Database(databaseName, false, MetadataUtil.PENDING_NO_OP);
+ Database database = new Database(databaseName, false, null, MetadataUtil.PENDING_NO_OP);
droppedCache.addDatabaseIfNotExists(database);
logAndApply(new MetadataLogicalOperation(database, false));
}
public void dropDataverse(String database, DataverseName dataverseName) {
- Dataverse dataverse = new Dataverse(database, dataverseName, null, MetadataUtil.PENDING_NO_OP);
+ Dataverse dataverse = new Dataverse(database, dataverseName, null, MetadataUtil.PENDING_NO_OP, null);
droppedCache.addDataverseIfNotExists(dataverse);
logAndApply(new MetadataLogicalOperation(dataverse, false));
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java
index 7b33682..7f60345 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java
@@ -30,19 +30,20 @@
public class MetadataBuiltinEntities {
//--------------------------------------- Databases ----------------------------------------//
- public static final Database SYSTEM_DATABASE =
- new Database(MetadataConstants.SYSTEM_DATABASE, true, MetadataUtil.PENDING_NO_OP);
+ public static final Database SYSTEM_DATABASE = new Database(MetadataConstants.SYSTEM_DATABASE, true,
+ MetadataConstants.DEFAULT_OWNER, MetadataUtil.PENDING_NO_OP);
- public static final Database DEFAULT_DATABASE =
- new Database(MetadataConstants.DEFAULT_DATABASE, false, MetadataUtil.PENDING_NO_OP);
+ public static final Database DEFAULT_DATABASE = new Database(MetadataConstants.DEFAULT_DATABASE, false,
+ MetadataConstants.DEFAULT_OWNER, MetadataUtil.PENDING_NO_OP);
//--------------------------------------- Dataverses ----------------------------------------//
- public static final Dataverse METADATA_DATAVERSE =
- new Dataverse(MetadataConstants.SYSTEM_DATABASE, MetadataConstants.METADATA_DATAVERSE_NAME,
- NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT, MetadataUtil.PENDING_NO_OP);
+ public static final Dataverse METADATA_DATAVERSE = new Dataverse(MetadataConstants.SYSTEM_DATABASE,
+ MetadataConstants.METADATA_DATAVERSE_NAME, NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT,
+ MetadataUtil.PENDING_NO_OP, MetadataConstants.DEFAULT_OWNER);
- public static final Dataverse DEFAULT_DATAVERSE = new Dataverse(MetadataConstants.DEFAULT_DATABASE,
- MetadataConstants.DEFAULT_DATAVERSE_NAME, NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
+ public static final Dataverse DEFAULT_DATAVERSE =
+ new Dataverse(MetadataConstants.DEFAULT_DATABASE, MetadataConstants.DEFAULT_DATAVERSE_NAME,
+ NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP, MetadataConstants.DEFAULT_OWNER);
//--------------------------------------- Datatypes -----------------------------------------//
public static final Datatype ANY_OBJECT_DATATYPE =
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
index c35c03a..a6b3863 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
@@ -130,6 +130,9 @@
public static final String FIELD_NAME_FULL_TEXT_FILTER_PIPELINE = "FullTextFilterPipeline";
public static final String FIELD_NAME_FULL_TEXT_STOPWORD_LIST = "StopwordList";
+ //open field
+ public static final String FIELD_NAME_OWNER_NAME = "OwnerName";
+
//---------------------------------- Record Types Creation ----------------------------------//
//--------------------------------------- Properties ----------------------------------------//
public static final int PROPERTIES_NAME_FIELD_INDEX = 0;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Database.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Database.java
index 8be93e3..e734e51 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Database.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Database.java
@@ -33,11 +33,13 @@
private final String databaseName;
private final boolean isSystemDatabase;
+ private final String owner;
private final int pendingOp;
- public Database(String databaseName, boolean isSystemDatabase, int pendingOp) {
+ public Database(String databaseName, boolean isSystemDatabase, String owner, int pendingOp) {
this.databaseName = databaseName;
this.isSystemDatabase = isSystemDatabase;
+ this.owner = owner;
this.pendingOp = pendingOp;
}
@@ -49,6 +51,10 @@
return isSystemDatabase;
}
+ public String getOwnerName() {
+ return owner;
+ }
+
public int getPendingOp() {
return pendingOp;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
index bba1763..a07a495 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
@@ -35,12 +35,14 @@
private final DataverseName dataverseName;
private final String dataFormat;
private final int pendingOp;
+ private final String ownerName;
- public Dataverse(String databaseName, DataverseName dataverseName, String format, int pendingOp) {
+ public Dataverse(String databaseName, DataverseName dataverseName, String format, int pendingOp, String ownerName) {
this.databaseName = Objects.requireNonNull(databaseName);
this.dataverseName = dataverseName;
this.dataFormat = format;
this.pendingOp = pendingOp;
+ this.ownerName = ownerName;
}
public String getDatabaseName() {
@@ -59,6 +61,10 @@
return pendingOp;
}
+ public String getOwnerName() {
+ return ownerName;
+ }
+
@Override
public Dataverse addToCache(MetadataCache cache) {
return cache.addDataverseIfNotExists(this);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatabaseTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatabaseTupleTranslator.java
index 9752dc8..9a5a5d8 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatabaseTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatabaseTupleTranslator.java
@@ -21,13 +21,16 @@
import java.util.Calendar;
+import org.apache.asterix.common.metadata.MetadataConstants;
import org.apache.asterix.metadata.bootstrap.DatabaseEntity;
+import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.Database;
import org.apache.asterix.om.base.ABoolean;
import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.base.AMutableInt32;
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.types.ARecordType;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -50,12 +53,20 @@
@Override
protected Database createMetadataEntityFromARecord(ARecord databaseRecord) throws AlgebricksException {
+ ARecordType recType = databaseRecord.getType();
String databaseName =
((AString) databaseRecord.getValueByPos(databaseEntity.databaseNameIndex())).getStringValue();
boolean isSystemDatabase =
((ABoolean) databaseRecord.getValueByPos(databaseEntity.systemDatabaseIndex())).getBoolean();
+ int ownerIndex = recType.getFieldIndex(MetadataRecordTypes.FIELD_NAME_OWNER_NAME);
+ String ownerName;
+ if (ownerIndex >= 0) {
+ ownerName = ((AString) databaseRecord.getValueByPos(ownerIndex)).getStringValue();
+ } else {
+ ownerName = MetadataConstants.DEFAULT_OWNER;
+ }
int pendingOp = ((AInt32) databaseRecord.getValueByPos(databaseEntity.pendingOpIndex())).getIntegerValue();
- return new Database(databaseName, isSystemDatabase, pendingOp);
+ return new Database(databaseName, isSystemDatabase, ownerName, pendingOp);
}
@Override
@@ -94,6 +105,9 @@
int32Serde.serialize(aInt32, fieldValue.getDataOutput());
recordBuilder.addField(databaseEntity.pendingOpIndex(), fieldValue);
+ // write open fields
+ writeOpenFields(database);
+
// write the payload record
recordBuilder.write(tupleBuilder.getDataOutput(), true);
tupleBuilder.addFieldEndOffset();
@@ -101,4 +115,20 @@
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
return tuple;
}
+
+ protected void writeOpenFields(Database database) throws HyracksDataException {
+ writeDatabaseOwner(database);
+ }
+
+ private void writeDatabaseOwner(Database database) throws HyracksDataException {
+ if (databaseEntity.databaseNameIndex() >= 0) {
+ fieldValue.reset();
+ aString.setValue(MetadataRecordTypes.FIELD_NAME_OWNER_NAME);
+ stringSerde.serialize(aString, fieldName.getDataOutput());
+ fieldValue.reset();
+ aString.setValue(database.getOwnerName());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(fieldName, fieldValue);
+ }
+ }
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
index 42fcfbb..e2d097b 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
@@ -22,13 +22,16 @@
import java.util.Calendar;
import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.asterix.common.metadata.MetadataConstants;
import org.apache.asterix.common.metadata.MetadataUtil;
import org.apache.asterix.metadata.bootstrap.DataverseEntity;
+import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.Dataverse;
import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.base.AMutableInt32;
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.types.ARecordType;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -51,20 +54,29 @@
@Override
protected Dataverse createMetadataEntityFromARecord(ARecord dataverseRecord) throws AlgebricksException {
+ ARecordType recType = dataverseRecord.getType();
+
String dataverseCanonicalName =
((AString) dataverseRecord.getValueByPos(dataverseEntity.dataverseNameIndex())).getStringValue();
DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
int databaseNameIndex = dataverseEntity.databaseNameIndex();
- String databaseName;
+ int ownerNameIndex = recType.getFieldIndex(MetadataRecordTypes.FIELD_NAME_OWNER_NAME);
+ String databaseName, ownerName;
if (databaseNameIndex >= 0) {
databaseName = ((AString) dataverseRecord.getValueByPos(databaseNameIndex)).getStringValue();
} else {
databaseName = MetadataUtil.databaseFor(dataverseName);
}
+
+ if (ownerNameIndex >= 0) {
+ ownerName = ((AString) dataverseRecord.getValueByPos(ownerNameIndex)).getStringValue();
+ } else {
+ ownerName = MetadataConstants.DEFAULT_OWNER;
+ }
String format = ((AString) dataverseRecord.getValueByPos(dataverseEntity.dataFormatIndex())).getStringValue();
int pendingOp = ((AInt32) dataverseRecord.getValueByPos(dataverseEntity.pendingOpIndex())).getIntegerValue();
- return new Dataverse(databaseName, dataverseName, format, pendingOp);
+ return new Dataverse(databaseName, dataverseName, format, pendingOp, ownerName);
}
@Override
@@ -115,6 +127,9 @@
int32Serde.serialize(aInt32, fieldValue.getDataOutput());
recordBuilder.addField(dataverseEntity.pendingOpIndex(), fieldValue);
+ // write open fields
+ writeOpenFields(dataverse);
+
// write record
recordBuilder.write(tupleBuilder.getDataOutput(), true);
tupleBuilder.addFieldEndOffset();
@@ -122,4 +137,20 @@
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
return tuple;
}
+
+ protected void writeOpenFields(Dataverse dataverse) throws HyracksDataException {
+ writeDataverseOwner(dataverse);
+ }
+
+ private void writeDataverseOwner(Dataverse dataverse) throws HyracksDataException {
+ if (dataverseEntity.databaseNameIndex() >= 0) {
+ fieldName.reset(); // reset the field-name buffer (was wrongly resetting fieldValue twice, leaving fieldName dirty)
+ aString.setValue(MetadataRecordTypes.FIELD_NAME_OWNER_NAME);
+ stringSerde.serialize(aString, fieldName.getDataOutput());
+ fieldValue.reset();
+ aString.setValue(dataverse.getOwnerName());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(fieldName, fieldValue);
+ }
+ }
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/provider/ExternalWriterProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/provider/ExternalWriterProvider.java
index b8583d0..ee7b3fc 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/provider/ExternalWriterProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/provider/ExternalWriterProvider.java
@@ -25,14 +25,17 @@
import org.apache.asterix.cloud.writer.GCSExternalFileWriterFactory;
import org.apache.asterix.cloud.writer.S3ExternalFileWriterFactory;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.external.util.ExternalDataConstants;
import org.apache.asterix.external.util.ExternalDataUtils;
import org.apache.asterix.external.writer.LocalFSExternalFileWriterFactory;
import org.apache.asterix.external.writer.compressor.GzipExternalFileCompressStreamFactory;
import org.apache.asterix.external.writer.compressor.IExternalFileCompressStreamFactory;
import org.apache.asterix.external.writer.compressor.NoOpExternalFileCompressStreamFactory;
+import org.apache.asterix.external.writer.printer.ParquetExternalFilePrinterFactory;
import org.apache.asterix.external.writer.printer.TextualExternalFilePrinterFactory;
import org.apache.asterix.formats.nontagged.CleanJSONPrinterFactoryProvider;
+import org.apache.asterix.om.types.IAType;
import org.apache.asterix.runtime.writer.ExternalFileWriterConfiguration;
import org.apache.asterix.runtime.writer.IExternalFileWriterFactory;
import org.apache.asterix.runtime.writer.IExternalFileWriterFactoryProvider;
@@ -41,6 +44,9 @@
import org.apache.hyracks.algebricks.data.IPrinterFactory;
import org.apache.hyracks.api.exceptions.SourceLocation;
import org.apache.hyracks.control.cc.ClusterControllerService;
+import org.apache.hyracks.util.StorageUtil;
+import org.apache.parquet.column.ParquetProperties;
+import org.apache.parquet.hadoop.metadata.CompressionCodecName;
public class ExternalWriterProvider {
private static final Map<String, IExternalFileWriterFactoryProvider> CREATOR_MAP;
@@ -107,20 +113,66 @@
}
public static IExternalPrinterFactory createPrinter(ICcApplicationContext appCtx, IWriteDataSink sink,
- Object sourceType) {
+ Object sourceType) throws CompilationException {
Map<String, String> configuration = sink.getConfiguration();
String format = configuration.get(ExternalDataConstants.KEY_FORMAT);
- // Only JSON is supported for now
- if (!ExternalDataConstants.FORMAT_JSON_LOWER_CASE.equalsIgnoreCase(format)) {
+ // Only JSON and parquet are supported for now
+ if (!ExternalDataConstants.FORMAT_JSON_LOWER_CASE.equalsIgnoreCase(format)
+ && !ExternalDataConstants.FORMAT_PARQUET.equalsIgnoreCase(format)) {
throw new UnsupportedOperationException("Unsupported format " + format);
}
String compression = getCompression(configuration);
- IExternalFileCompressStreamFactory compressStreamFactory =
- createCompressionStreamFactory(appCtx, compression, configuration);
- IPrinterFactory printerFactory = CleanJSONPrinterFactoryProvider.INSTANCE.getPrinterFactory(sourceType);
- return new TextualExternalFilePrinterFactory(printerFactory, compressStreamFactory);
+
+ switch (format) {
+ case ExternalDataConstants.FORMAT_JSON_LOWER_CASE:
+ IExternalFileCompressStreamFactory compressStreamFactory =
+ createCompressionStreamFactory(appCtx, compression, configuration);
+ IPrinterFactory printerFactory = CleanJSONPrinterFactoryProvider.INSTANCE.getPrinterFactory(sourceType);
+ return new TextualExternalFilePrinterFactory(printerFactory, compressStreamFactory);
+ case ExternalDataConstants.FORMAT_PARQUET:
+ String parquetSchemaString = configuration.get(ExternalDataConstants.PARQUET_SCHEMA_KEY);
+
+ CompressionCodecName compressionCodecName;
+ if (compression == null || compression.isEmpty() || compression.equals("none")) {
+ compressionCodecName = CompressionCodecName.UNCOMPRESSED;
+ } else {
+ compressionCodecName = CompressionCodecName.valueOf(compression.toUpperCase(java.util.Locale.ROOT)); // locale-independent: default-locale casing (e.g. Turkish dotless i) would break codec lookup
+ }
+
+ String rowGroupSizeString = getRowGroupSize(configuration);
+ String pageSizeString = getPageSize(configuration);
+
+ long rowGroupSize = StorageUtil.getByteValue(rowGroupSizeString);
+ int pageSize = (int) StorageUtil.getByteValue(pageSizeString);
+
+ ParquetProperties.WriterVersion writerVersion = getParquetWriterVersion(configuration);
+
+ return new ParquetExternalFilePrinterFactory(compressionCodecName, parquetSchemaString,
+ (IAType) sourceType, rowGroupSize, pageSize, writerVersion);
+ default:
+ throw new UnsupportedOperationException("Unsupported format " + format);
+ }
+ }
+
+ private static ParquetProperties.WriterVersion getParquetWriterVersion(Map<String, String> configuration) {
+ String writerVersionString = configuration.getOrDefault(ExternalDataConstants.PARQUET_WRITER_VERSION_KEY,
+ ExternalDataConstants.PARQUET_WRITER_VERSION_VALUE_1);
+ if (writerVersionString.equals(ExternalDataConstants.PARQUET_WRITER_VERSION_VALUE_2)) {
+ return ParquetProperties.WriterVersion.PARQUET_2_0;
+ }
+ return ParquetProperties.WriterVersion.PARQUET_1_0;
+ }
+
+ private static String getRowGroupSize(Map<String, String> configuration) {
+ return configuration.getOrDefault(ExternalDataConstants.KEY_PARQUET_ROW_GROUP_SIZE,
+ ExternalDataConstants.PARQUET_DEFAULT_ROW_GROUP_SIZE);
+ }
+
+ private static String getPageSize(Map<String, String> configuration) {
+ return configuration.getOrDefault(ExternalDataConstants.KEY_PARQUET_PAGE_SIZE,
+ ExternalDataConstants.PARQUET_DEFAULT_PAGE_SIZE);
}
private static String getFormat(Map<String, String> configuration) {
diff --git a/asterixdb/asterix-om/pom.xml b/asterixdb/asterix-om/pom.xml
index 6db4840..9eccb86 100644
--- a/asterixdb/asterix-om/pom.xml
+++ b/asterixdb/asterix-om/pom.xml
@@ -143,8 +143,8 @@
<artifactId>guava</artifactId>
</dependency>
<dependency>
- <groupId>com.esri.geometry</groupId>
- <artifactId>esri-geometry-api</artifactId>
+ <groupId>org.locationtech.jts</groupId>
+ <artifactId>jts-core</artifactId>
</dependency>
<dependency>
<groupId>javax.xml.bind</groupId>
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AGeometryPartialBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AGeometryPartialBinaryComparatorFactory.java
index 57f9898..fa37722 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AGeometryPartialBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AGeometryPartialBinaryComparatorFactory.java
@@ -51,7 +51,7 @@
if (c == 0) {
AGeometry geometry = AGeometrySerializerDeserializer.getAGeometryObject(b1, s1);
c = (geometry.getGeometry()
- .Equals(AGeometrySerializerDeserializer.getAGeometryObject(b2, s2).getGeometry())) ? 0 : 1;
+ .equals(AGeometrySerializerDeserializer.getAGeometryObject(b2, s2).getGeometry())) ? 0 : 1;
}
return c;
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/PrintTools.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/PrintTools.java
index 449ce13..c4da935 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/PrintTools.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/PrintTools.java
@@ -29,11 +29,16 @@
import org.apache.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.JtsModule;
import org.apache.asterix.om.base.temporal.GregorianCalendarSystem;
import org.apache.hyracks.algebricks.data.utils.WriteValueTools;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.util.bytes.HexPrinter;
import org.apache.hyracks.util.string.UTF8StringUtil;
+import org.locationtech.jts.geom.Geometry;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
public class PrintTools {
@@ -461,4 +466,23 @@
return highSurrogateSize + lowSurrogateSize;
}
+ /**
+ * Converts a JTS Geometry to a GeoJSON string. Returns an empty string if the
+ * geometry is null or if serialization fails.
+ * @param geometry The JTS Geometry to be converted.
+ * @return A GeoJSON string representation of the Geometry, or "" on failure.
+ */
+ public static String geometryToGeoJSON(Geometry geometry) {
+ if (geometry == null) {
+ return "";
+ }
+ ObjectMapper mapper = new ObjectMapper(); // NOTE(review): consider caching a static mapper — building and registering the module on every call is expensive
+ mapper.registerModule(new JtsModule());
+ try {
+ return mapper.writeValueAsString(geometry);
+ } catch (JsonProcessingException e) {
+ return ""; // best-effort: a serialization failure is signalled by an empty string, not an exception
+ }
+ }
+
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AGeometryPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AGeometryPrinterFactory.java
index cc74f85..848e1bd 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AGeometryPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AGeometryPrinterFactory.java
@@ -23,11 +23,11 @@
import java.io.DataInputStream;
import java.io.PrintStream;
+import org.apache.asterix.dataflow.data.nontagged.printers.PrintTools;
import org.apache.asterix.dataflow.data.nontagged.serde.AGeometrySerializerDeserializer;
import org.apache.hyracks.algebricks.data.IPrinter;
import org.apache.hyracks.algebricks.data.IPrinterFactory;
-
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
public class AGeometryPrinterFactory implements IPrinterFactory {
@@ -37,8 +37,8 @@
public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> {
ByteArrayInputStream inStream = new ByteArrayInputStream(b, s + 1, l - 1);
DataInput dataIn = new DataInputStream(inStream);
- OGCGeometry geometry = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
- ps.print(geometry.asGeoJson());
+ Geometry geometry = AGeometrySerializerDeserializer.INSTANCE.deserialize(dataIn).getGeometry();
+ ps.print(PrintTools.geometryToGeoJSON(geometry));
};
@Override
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/AGeometrySerializerDeserializer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/AGeometrySerializerDeserializer.java
index 38a4c46..28c171d 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/AGeometrySerializerDeserializer.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/AGeometrySerializerDeserializer.java
@@ -21,19 +21,18 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import java.nio.ByteBuffer;
import java.util.Arrays;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoFunctionUtils;
import org.apache.asterix.om.base.AGeometry;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-import com.esri.core.geometry.OperatorImportFromWkb;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.WkbImportFlags;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKBReader;
+import org.locationtech.jts.io.WKBWriter;
public class AGeometrySerializerDeserializer implements ISerializerDeserializer<AGeometry> {
@@ -41,24 +40,19 @@
public static final AGeometrySerializerDeserializer INSTANCE = new AGeometrySerializerDeserializer();
- /**Use WGS 84 (EPSG:4326) as the default coordinate reference system*/
- public static final SpatialReference DEFAULT_CRS = SpatialReference.create(4326);
-
private AGeometrySerializerDeserializer() {
}
@Override
public AGeometry deserialize(DataInput in) throws HyracksDataException {
+ WKBReader wkbReader = new WKBReader();
try {
int length = in.readInt();
byte[] bytes = new byte[length];
in.readFully(bytes);
- ByteBuffer buffer = ByteBuffer.wrap(bytes);
- OGCGeometry geometry = OGCGeometry.createFromOGCStructure(
- OperatorImportFromWkb.local().executeOGC(WkbImportFlags.wkbImportDefaults, buffer, null),
- DEFAULT_CRS);
+ Geometry geometry = wkbReader.read(bytes);
return new AGeometry(geometry);
- } catch (IOException e) {
+ } catch (IOException | ParseException e) {
throw HyracksDataException.create(e);
}
}
@@ -66,8 +60,10 @@
@Override
public void serialize(AGeometry instance, DataOutput out) throws HyracksDataException {
try {
- OGCGeometry geometry = instance.getGeometry();
- byte[] buffer = geometry.asBinary().array();
+ Geometry geometry = instance.getGeometry();
+ WKBWriter wkbWriter = new WKBWriter(GeoFunctionUtils.getCoordinateDimension(geometry),
+ GeoFunctionUtils.LITTLE_ENDIAN_BYTEORDER);
+ byte[] buffer = wkbWriter.write(geometry);
// For efficiency, we store the size of the geometry in bytes in the first 32 bits
// This allows AsterixDB to skip over this attribute if needed.
out.writeInt(buffer.length);
@@ -77,24 +73,40 @@
}
}
- public static int getAGeometrySizeOffset() throws HyracksDataException {
+ public void serialize(Geometry geometry, DataOutput out) throws HyracksDataException {
+ try {
+ WKBWriter wkbWriter = new WKBWriter(GeoFunctionUtils.getCoordinateDimension(geometry),
+ GeoFunctionUtils.LITTLE_ENDIAN_BYTEORDER);
+ byte[] buffer = wkbWriter.write(geometry);
+ // For efficiency, we store the size of the geometry in bytes in the first 32 bits
+ // This allows AsterixDB to skip over this attribute if needed.
+ out.writeInt(buffer.length);
+ out.write(buffer);
+ } catch (IOException e) {
+ throw HyracksDataException.create(e);
+ }
+ }
+
+ public static int getAGeometrySizeOffset() {
return 0;
}
public static AGeometry getAGeometryObject(byte[] bytes, int startOffset) throws HyracksDataException {
// Size of the AGeometry object is stored in bytes in the first 32 bits
// See serialize method
+ WKBReader wkbReader = new WKBReader();
int size = AInt32SerializerDeserializer.getInt(bytes, startOffset);
if (bytes.length < startOffset + size + 4)
// TODO(mmahin): this error code takes 5 parameters, and this is passing none, so I suspect this isn't right
throw RuntimeDataException.create(ErrorCode.VALUE_OUT_OF_RANGE);
-
- // Skip the size of the geometry in first 4 bytes
- byte[] bytes1 = Arrays.copyOfRange(bytes, startOffset + 4, startOffset + size + 4);
- ByteBuffer buffer = ByteBuffer.wrap(bytes1);
- OGCGeometry geometry = OGCGeometry.createFromOGCStructure(
- OperatorImportFromWkb.local().executeOGC(WkbImportFlags.wkbImportDefaults, buffer, null), DEFAULT_CRS);
- return new AGeometry(geometry);
+ try {
+ // Skip the size of the geometry in first 4 bytes
+ byte[] bytes1 = Arrays.copyOfRange(bytes, startOffset + 4, startOffset + size + 4);
+ Geometry geometry = wkbReader.read(bytes1);
+ return new AGeometry(geometry);
+ } catch (ParseException e) {
+ throw HyracksDataException.create(e);
+ }
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoFunctionUtils.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoFunctionUtils.java
new file mode 100644
index 0000000..1e92b62
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoFunctionUtils.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts;
+
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.CoordinateXYZM;
+import org.locationtech.jts.geom.Geometry;
+
+public class GeoFunctionUtils {
+ public static final int LITTLE_ENDIAN_BYTEORDER = 2; // == JTS ByteOrderValues.LITTLE_ENDIAN
+
+ /**
+ * Returns the WKB output dimension for a geometry, judged from its first coordinate.
+ * TODO: Add 4th dimension support
+ * @param geometry The geometry to check.
+ * @return the dimensionality of the coordinate: 2 (XY) or 3 (XYZ or XYM); true 4D (XYZM) output is not yet supported.
+ */
+ public static int getCoordinateDimension(Geometry geometry) {
+ int dimension = 2; // plain XY unless Z/M is found below
+ if (geometry == null || geometry.isEmpty()) {
+ return 2; // no coordinates to inspect; default to XY
+ }
+ Coordinate sample = geometry.getCoordinates()[0]; // NOTE(review): assumes all coordinates share the first one's dimensionality
+ if (!Double.isNaN(sample.getZ())) {
+ dimension++;
+ }
+
+ if (sample instanceof CoordinateXYZM) {
+ CoordinateXYZM firstCoordXYZM = (CoordinateXYZM) sample;
+ if (!Double.isNaN(firstCoordXYZM.getM())) {
+ dimension = 3; // M present: output capped at 3 ordinates until 4D support lands (see TODO)
+ }
+ }
+ return dimension;
+ }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoJsonConstants.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoJsonConstants.java
new file mode 100644
index 0000000..7f3d1de
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoJsonConstants.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts;
+
+/**
+ * Constants used for GeoJSON serialization and deserialization of JTS geometries.
+ * This class provides string constants that represent various GeoJSON object types
+ * and properties, such as types of geometries and common GeoJSON attributes like
+ * coordinates and type.
+ * <p>
+ * These constants are used throughout the Jackson JTS module to ensure consistency
+ * in processing and generating GeoJSON.
+ */
+public class GeoJsonConstants {
+ public static final String POINT = "Point";
+ public static final String LINE_STRING = "LineString";
+ public static final String POLYGON = "Polygon";
+
+ public static final String MULTI_POINT = "MultiPoint";
+ public static final String MULTI_LINE_STRING = "MultiLineString";
+ public static final String MULTI_POLYGON = "MultiPolygon";
+
+ public static final String GEOMETRY_COLLECTION = "GeometryCollection";
+
+ public static final String TYPE = "type";
+
+ public static final String GEOMETRIES = "geometries";
+
+ public static final String COORDINATES = "coordinates";
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/JtsModule.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/JtsModule.java
new file mode 100644
index 0000000..77b31ce
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/JtsModule.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/JtsModule.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.GenericGeometryParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.GeometryCollectionParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.LineStringParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.MultiLineStringParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.MultiPointParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.MultiPolygonParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.PointParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.PolygonParser;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.serde.GeometryDeserializer;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.serde.GeometrySerializer;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryCollection;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.MultiLineString;
+import org.locationtech.jts.geom.MultiPoint;
+import org.locationtech.jts.geom.MultiPolygon;
+import org.locationtech.jts.geom.Point;
+import org.locationtech.jts.geom.Polygon;
+
+import com.fasterxml.jackson.core.Version;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+
+/**
+ * A Jackson module for serializing and deserializing JTS (Java Topology Suite) geometry objects.
+ * This module provides custom serializers and deserializers capable of handling various types
+ * of geometries such as Points, LineStrings, Polygons, and their respective multi-part counterparts,
+ * as well as GeometryCollections.
+ * <p>
+ * It leverages a {@link GeometryFactory} for the creation of geometry objects during deserialization,
+ * ensuring that geometry objects are constructed appropriately.
+ */
+public class JtsModule extends SimpleModule {
+ private static final long serialVersionUID = 324082011931609589L;
+
+ public JtsModule() {
+ this(new GeometryFactory());
+ }
+
+ /**
+ * Constructs a JtsModule with a specified {@link GeometryFactory}.
+ * This constructor allows for customization of the geometry factory used for creating
+ * JTS geometry objects, providing flexibility for various precision and srid settings.
+ *
+ * @param geometryFactory the geometry factory to use for creating geometry objects
+ * during deserialization
+ */
+ public JtsModule(GeometryFactory geometryFactory) {
+ super("JtsModule", new Version(1, 0, 0, null, null, null));
+
+ addSerializer(Geometry.class, new GeometrySerializer());
+ GenericGeometryParser genericGeometryParser = new GenericGeometryParser(geometryFactory);
+ addDeserializer(Geometry.class, new GeometryDeserializer<>(genericGeometryParser));
+ addDeserializer(Point.class, new GeometryDeserializer<>(new PointParser(geometryFactory)));
+ addDeserializer(MultiPoint.class, new GeometryDeserializer<>(new MultiPointParser(geometryFactory)));
+ addDeserializer(LineString.class, new GeometryDeserializer<>(new LineStringParser(geometryFactory)));
+ addDeserializer(MultiLineString.class, new GeometryDeserializer<>(new MultiLineStringParser(geometryFactory)));
+ addDeserializer(Polygon.class, new GeometryDeserializer<>(new PolygonParser(geometryFactory)));
+ addDeserializer(MultiPolygon.class, new GeometryDeserializer<>(new MultiPolygonParser(geometryFactory)));
+ addDeserializer(GeometryCollection.class,
+ new GeometryDeserializer<>(new GeometryCollectionParser(geometryFactory, genericGeometryParser)));
+ }
+
+ @Override
+ public void setupModule(SetupContext context) {
+ super.setupModule(context); // no-op beyond super; retained for parity with the upstream jackson-datatype-jts module
+ }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/BaseParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/BaseParser.java
new file mode 100644
index 0000000..4721680
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/BaseParser.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/BaseParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import org.locationtech.jts.geom.GeometryFactory;
+
+/**
+ * A base class for parsers that convert GeoJSON data into JTS Geometry objects.
+ * This class provides common functionality and a shared {@link GeometryFactory} for its subclasses.
+ * Subclasses of {@code BaseParser} use this shared geometry factory to construct specific types of geometries from
+ * their GeoJSON representations. By extending this base class, they inherit the common functionality and the ability
+ * to leverage the same geometry creation process.
+ */
+public class BaseParser {
+
+ protected GeometryFactory geometryFactory;
+
+ public BaseParser(GeometryFactory geometryFactory) {
+ this.geometryFactory = geometryFactory;
+ }
+
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GenericGeometryParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GenericGeometryParser.java
new file mode 100644
index 0000000..d1c9475
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GenericGeometryParser.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/GenericGeometryParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for converting various types of GeoJSON geometries into JTS {@link Geometry} objects using a specified {@link GeometryFactory}.
+ * This class acts as a general-purpose parser that can handle multiple types of GeoJSON geometries, including Point, MultiPoint,
+ * LineString, MultiLineString, Polygon, MultiPolygon, and GeometryCollection. It dynamically delegates the parsing to specific
+ * geometry parsers based on the GeoJSON type of the geometry.
+ * The parser extends {@code BaseParser} to utilize shared functionality and ensure consistent application of the {@link GeometryFactory}
+ * for creating JTS geometry instances. It maintains a registry of individual geometry parsers, each capable of handling a specific
+ * type of GeoJSON geometry.
+ */
+public class GenericGeometryParser extends BaseParser implements GeometryParser<Geometry> {
+
+ private Map<String, GeometryParser> parsers;
+
+ public GenericGeometryParser(GeometryFactory geometryFactory) {
+ super(geometryFactory);
+ parsers = new HashMap<>();
+ parsers.put(GeoJsonConstants.POINT, new PointParser(geometryFactory));
+ parsers.put(GeoJsonConstants.MULTI_POINT, new MultiPointParser(geometryFactory));
+ parsers.put(GeoJsonConstants.LINE_STRING, new LineStringParser(geometryFactory));
+ parsers.put(GeoJsonConstants.MULTI_LINE_STRING, new MultiLineStringParser(geometryFactory));
+ parsers.put(GeoJsonConstants.POLYGON, new PolygonParser(geometryFactory));
+ parsers.put(GeoJsonConstants.MULTI_POLYGON, new MultiPolygonParser(geometryFactory));
+ parsers.put(GeoJsonConstants.GEOMETRY_COLLECTION, new GeometryCollectionParser(geometryFactory, this));
+ }
+
+ @Override
+ public Geometry geometryFromJson(JsonNode node) throws JsonMappingException {
+ String typeName = node.get(GeoJsonConstants.TYPE).asText();
+ GeometryParser parser = parsers.get(typeName);
+ if (parser != null) {
+ return parser.geometryFromJson(node);
+ } else {
+ throw new JsonMappingException("Invalid geometry type: " + typeName);
+ }
+ }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryCollectionParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryCollectionParser.java
new file mode 100644
index 0000000..a945675
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryCollectionParser.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/GeometryCollectionParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryCollection;
+import org.locationtech.jts.geom.GeometryFactory;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for converting GeoJSON geometry collection data into JTS {@link GeometryCollection} objects using a specified {@link GeometryFactory}.
+ * This class is designed to handle the parsing of GeoJSON representations of geometry collections, which may include multiple geometries
+ * of different types (e.g., Points, LineStrings, Polygons, etc.). It utilizes a {@link GenericGeometryParser} to handle the parsing of individual
+ * geometries within the collection.
+ *
+ * The parser extends {@code BaseParser} to leverage common functionality and ensure the consistent application of the {@link GeometryFactory}
+ * for creating {@link GeometryCollection} instances.
+ */
+public class GeometryCollectionParser extends BaseParser implements GeometryParser<GeometryCollection> {
+
+ private GenericGeometryParser genericGeometriesParser;
+
+ public GeometryCollectionParser(GeometryFactory geometryFactory, GenericGeometryParser genericGeometriesParser) {
+ super(geometryFactory);
+ this.genericGeometriesParser = genericGeometriesParser;
+ }
+
+ private Geometry[] geometriesFromJson(JsonNode arrayOfGeoms) throws JsonMappingException {
+ Geometry[] items = new Geometry[arrayOfGeoms.size()];
+ for (int i = 0; i != arrayOfGeoms.size(); ++i) {
+ items[i] = genericGeometriesParser.geometryFromJson(arrayOfGeoms.get(i));
+ }
+ return items;
+ }
+
+ @Override
+ public GeometryCollection geometryFromJson(JsonNode node) throws JsonMappingException {
+ return geometryFactory.createGeometryCollection(geometriesFromJson(node.get(GeoJsonConstants.GEOMETRIES)));
+ }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryParser.java
new file mode 100644
index 0000000..5e3bacc
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryParser.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/GeometryParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import org.locationtech.jts.geom.Geometry;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * An interface for parsing GeoJSON data into JTS {@link Geometry} objects.
+ * This interface defines a contract for classes that convert JSON representations of geometries
+ * into specific JTS geometry instances, such as Points, LineStrings, Polygons, etc.
+ * The {@code GeometryParser} interface ensures a standard method is available for deserializing
+ * GeoJSON structures into their corresponding JTS geometrical forms. Implementations of this interface
+ * are responsible for handling the parsing logic for different types of geometries.
+ *
+ * @param <T> the type of JTS Geometry that the parser will produce, such as Point, LineString, Polygon, etc.
+ */
public interface GeometryParser<T extends Geometry> {

    /**
     * Parses the given GeoJSON node into a JTS geometry of type {@code T}.
     *
     * @param node the GeoJSON geometry node to parse
     * @return the parsed JTS geometry
     * @throws JsonMappingException if the node does not represent a valid geometry of this type
     */
    T geometryFromJson(JsonNode node) throws JsonMappingException;

}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/LineStringParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/LineStringParser.java
new file mode 100644
index 0000000..1c744db
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/LineStringParser.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/LineStringParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import static org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants.COORDINATES;
+
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.LineString;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for converting GeoJSON line string data into JTS {@link LineString} objects using a specified {@link GeometryFactory}.
+ * This class parses JSON representations of line strings, which are essentially ordered sets of coordinates that form a continuous line.
+ * It supports the extraction and transformation of coordinate arrays from GeoJSON into JTS LineString geometries.
+ * The parser extends {@code BaseParser} to utilize common functionalities and ensures the consistent application of the {@link GeometryFactory}
+ * for creating {@link LineString} instances.
+ */
+public class LineStringParser extends BaseParser implements GeometryParser<LineString> {
+
+ public LineStringParser(GeometryFactory geometryFactory) {
+ super(geometryFactory);
+ }
+
+ public LineString lineStringFromJson(JsonNode root) {
+ return geometryFactory.createLineString(PointParser.coordinatesFromJson(root.get(COORDINATES)));
+ }
+
+ @Override
+ public LineString geometryFromJson(JsonNode node) throws JsonMappingException {
+ return lineStringFromJson(node);
+ }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiLineStringParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiLineStringParser.java
new file mode 100644
index 0000000..9a7fbae
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiLineStringParser.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/MultiLineStringParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import static org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants.COORDINATES;
+
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.MultiLineString;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for converting GeoJSON multi-line string data into JTS {@link MultiLineString} objects using a specified {@link GeometryFactory}.
+ * This class handles the parsing of multi-line string geometries, where each line string is represented by an array of coordinate arrays.
+ * It supports the conversion of complex multi-line string geometries, which may consist of multiple, distinct line strings.
+ * The parser extends {@code BaseParser} to utilize shared functionality and ensure the consistent application of the {@link GeometryFactory}
+ * in creating {@link MultiLineString} instances.
+ */
+public class MultiLineStringParser extends BaseParser implements GeometryParser<MultiLineString> {
+
+ public MultiLineStringParser(GeometryFactory geometryFactory) {
+ super(geometryFactory);
+ }
+
+ public MultiLineString multiLineStringFromJson(JsonNode root) {
+ return geometryFactory.createMultiLineString(lineStringsFromJson(root.get(COORDINATES)));
+ }
+
+ private LineString[] lineStringsFromJson(JsonNode array) {
+ LineString[] strings = new LineString[array.size()];
+ for (int i = 0; i != array.size(); ++i) {
+ strings[i] = geometryFactory.createLineString(PointParser.coordinatesFromJson(array.get(i)));
+ }
+ return strings;
+ }
+
+ @Override
+ public MultiLineString geometryFromJson(JsonNode node) throws JsonMappingException {
+ return multiLineStringFromJson(node);
+ }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPointParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPointParser.java
new file mode 100644
index 0000000..a27d60a
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPointParser.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/MultiPointParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import static org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants.COORDINATES;
+
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.MultiPoint;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for converting GeoJSON multi-point data into JTS {@link MultiPoint} objects using a specified {@link GeometryFactory}.
+ * This class is capable of parsing JSON representations of multi-point geometries, where each point is represented
+ * by an array of coordinates. The parser extends {@code BaseParser} to utilize common functionality and ensure
+ * the {@link GeometryFactory} is applied consistently to create {@link MultiPoint} instances.
+ */
+public class MultiPointParser extends BaseParser implements GeometryParser<MultiPoint> {
+
+ public MultiPointParser(GeometryFactory geometryFactory) {
+ super(geometryFactory);
+ }
+
+ public MultiPoint multiPointFromJson(JsonNode root) {
+ return geometryFactory.createMultiPointFromCoords(PointParser.coordinatesFromJson(root.get(COORDINATES)));
+ }
+
+ @Override
+ public MultiPoint geometryFromJson(JsonNode node) throws JsonMappingException {
+ return multiPointFromJson(node);
+ }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPolygonParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPolygonParser.java
new file mode 100644
index 0000000..c9a4235
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPolygonParser.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/MultiPolygonParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import static org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants.COORDINATES;
+
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.MultiPolygon;
+import org.locationtech.jts.geom.Polygon;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for converting GeoJSON multi-polygon data into JTS {@link MultiPolygon} objects using a specified {@link GeometryFactory}.
+ * This class leverages a helper {@link PolygonParser} to parse individual polygons from a collection of polygons represented
+ * in a GeoJSON format. It supports parsing complex multi-polygon geometries, which can include multiple outer boundaries and
+ * their respective inner holes.
+ * The parser extends {@code BaseParser} to make use of common functionalities and ensure the consistent application of the
+ * {@link GeometryFactory} in creating {@link MultiPolygon} instances.
+ */
+public class MultiPolygonParser extends BaseParser implements GeometryParser<MultiPolygon> {
+
+ private PolygonParser helperParser;
+
+ public MultiPolygonParser(GeometryFactory geometryFactory) {
+ super(geometryFactory);
+ helperParser = new PolygonParser(geometryFactory);
+ }
+
+ public MultiPolygon multiPolygonFromJson(JsonNode root) {
+ JsonNode arrayOfPolygons = root.get(COORDINATES);
+ return geometryFactory.createMultiPolygon(polygonsFromJson(arrayOfPolygons));
+ }
+
+ private Polygon[] polygonsFromJson(JsonNode arrayOfPolygons) {
+ Polygon[] polygons = new Polygon[arrayOfPolygons.size()];
+ for (int i = 0; i != arrayOfPolygons.size(); ++i) {
+ polygons[i] = helperParser.polygonFromJsonArrayOfRings(arrayOfPolygons.get(i));
+ }
+ return polygons;
+ }
+
+ @Override
+ public MultiPolygon geometryFromJson(JsonNode node) throws JsonMappingException {
+ return multiPolygonFromJson(node);
+ }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PointParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PointParser.java
new file mode 100644
index 0000000..a91a326
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PointParser.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/PointParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import static org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants.COORDINATES;
+
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.jts.geom.CoordinateXYZM;
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.Point;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * Parses JSON representations of GeoJSON Points into JTS {@link Point} objects.
+ * This parser handles the extraction of coordinates from GeoJSON and converts them
+ * into {@link Point} geometries using a provided {@link GeometryFactory}.
+ *
+ * This class supports reading points defined with two-dimensional (x, y),
+ * three-dimensional (x, y, z), and four-dimensional (x, y, z, m) coordinates.
+ */
+public class PointParser extends BaseParser implements GeometryParser<Point> {
+
+ public PointParser(GeometryFactory geometryFactory) {
+ super(geometryFactory);
+ }
+
+ public static Coordinate coordinateFromJson(JsonNode array) {
+ assert array.isArray() && (array.size() == 2 || array.size() == 3
+ || array.size() == 4) : "expecting coordinate array with single point [ x, y, |z|, |m| ]";
+
+ if (array.size() == 2) {
+ return new Coordinate(array.get(0).asDouble(), array.get(1).asDouble());
+ }
+
+ if (array.size() == 3) {
+ return new Coordinate(array.get(0).asDouble(), array.get(1).asDouble(), array.get(2).asDouble());
+ }
+
+ return new CoordinateXYZM(array.get(0).asDouble(), array.get(1).asDouble(), array.get(2).asDouble(),
+ array.get(3).asDouble());
+ }
+
+ public static Coordinate[] coordinatesFromJson(JsonNode array) {
+ Coordinate[] points = new Coordinate[array.size()];
+ for (int i = 0; i != array.size(); ++i) {
+ points[i] = PointParser.coordinateFromJson(array.get(i));
+ }
+ return points;
+ }
+
+ public Point pointFromJson(JsonNode node) {
+ return geometryFactory.createPoint(coordinateFromJson(node.get(COORDINATES)));
+ }
+
+ @Override
+ public Point geometryFromJson(JsonNode node) throws JsonMappingException {
+ return pointFromJson(node);
+ }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PolygonParser.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PolygonParser.java
new file mode 100644
index 0000000..5c7492a
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PolygonParser.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/parsers/PolygonParser.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers;
+
+import static org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants.COORDINATES;
+
+import org.locationtech.jts.geom.GeometryFactory;
+import org.locationtech.jts.geom.LinearRing;
+import org.locationtech.jts.geom.Polygon;
+
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A parser for transforming GeoJSON polygon data into JTS {@link Polygon} objects using a specified {@link GeometryFactory}.
+ * This class handles the parsing of polygons, which may include an outer boundary and any number of inner holes.
+ * Each polygon is defined by arrays of coordinates that represent linear rings—the first array defines the exterior boundary,
+ * and any subsequent arrays define interior holes.
+ * This parser extends {@code BaseParser} to leverage shared functionality and ensure consistent application of the
+ * {@link GeometryFactory} in creating polygon geometries.
+ */
+public class PolygonParser extends BaseParser implements GeometryParser<Polygon> {
+
+ public PolygonParser(GeometryFactory geometryFactory) {
+ super(geometryFactory);
+ }
+
+ public Polygon polygonFromJson(JsonNode node) {
+ JsonNode arrayOfRings = node.get(COORDINATES);
+ return polygonFromJsonArrayOfRings(arrayOfRings);
+ }
+
+ public Polygon polygonFromJsonArrayOfRings(JsonNode arrayOfRings) {
+ LinearRing shell = linearRingsFromJson(arrayOfRings.get(0));
+ int size = arrayOfRings.size();
+ LinearRing[] holes = new LinearRing[size - 1];
+ for (int i = 1; i < size; i++) {
+ holes[i - 1] = linearRingsFromJson(arrayOfRings.get(i));
+ }
+ return geometryFactory.createPolygon(shell, holes);
+ }
+
+ private LinearRing linearRingsFromJson(JsonNode coordinates) {
+ assert coordinates.isArray() : "expected coordinates array";
+ return geometryFactory.createLinearRing(PointParser.coordinatesFromJson(coordinates));
+ }
+
+ @Override
+ public Polygon geometryFromJson(JsonNode node) throws JsonMappingException {
+ return polygonFromJson(node);
+ }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometryDeserializer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometryDeserializer.java
new file mode 100644
index 0000000..1cebfcd
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometryDeserializer.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/serialization/GeometryDeserializer.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.serde;
+
+import java.io.IOException;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.parsers.GeometryParser;
+import org.locationtech.jts.geom.Geometry;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.ObjectCodec;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.JsonDeserializer;
+import com.fasterxml.jackson.databind.JsonNode;
+
+/**
+ * A custom Jackson deserializer for JTS Geometry objects.
+ * This deserializer translates JSON structures into JTS Geometry instances using a specified
+ * {@link GeometryParser}. It supports generic geometry types, allowing for flexible deserialization
+ * of various specific types of geometries such as Point, LineString, Polygon, etc.
+ * The deserializer relies on a geometry parser which must be provided during instantiation.
+ * The parser is responsible for converting a JSON node into a corresponding JTS Geometry object.
+ * Usage:
+ * This deserializer is registered in the Jackson JTS
+ * module {@link org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.JtsModule} and is used to configure an
+ * ObjectMapper to support JTS geometries.
+ */
+public class GeometryDeserializer<T extends Geometry> extends JsonDeserializer<T> {
+
+    // Assigned once at construction and never reassigned; final documents that invariant
+    // and makes the deserializer safely shareable across threads (Jackson reuses instances).
+    private final GeometryParser<T> geometryParser;
+
+    public GeometryDeserializer(GeometryParser<T> geometryParser) {
+        this.geometryParser = geometryParser;
+    }
+
+    /**
+     * Deserializes a JSON node into a JTS Geometry object.
+     * The full JSON subtree is read into a {@link JsonNode} and handed to the configured
+     * {@link GeometryParser}, which produces the concrete Geometry instance.
+     *
+     * @param jsonParser the Jackson parser reading the JSON content
+     * @param deserializationContext the Jackson deserialization context
+     * @return the deserialized JTS Geometry object
+     * @throws IOException if there is an issue in reading or parsing the JSON node
+     */
+    @Override
+    public T deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
+        ObjectCodec oc = jsonParser.getCodec();
+        JsonNode root = oc.readTree(jsonParser);
+        return geometryParser.geometryFromJson(root);
+    }
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometrySerializer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometrySerializer.java
new file mode 100644
index 0000000..3207afb
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometrySerializer.java
@@ -0,0 +1,231 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * This file includes code derived from the project "jackson-datatype-jts"
+ * under the Apache License 2.0.
+ * Original source: https://github.com/bedatadriven/jackson-datatype-jts/blob/master/src/main/java/com/bedatadriven/jackson/datatype/jts/serialization/GeometrySerializer.java.
+ *
+ * Modifications:
+ * - Adapted the code to support the org.locationtech.jts package instead of com.vividsolutions.jts
+ *
+ * The modified version retains the original license and notices. For more information
+ * on the original project and licensing, please visit https://github.com/bedatadriven/jackson-datatype-jts.
+ */
+package org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.serde;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.GeoJsonConstants;
+import org.locationtech.jts.geom.CoordinateXYZM;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryCollection;
+import org.locationtech.jts.geom.LineString;
+import org.locationtech.jts.geom.MultiLineString;
+import org.locationtech.jts.geom.MultiPoint;
+import org.locationtech.jts.geom.MultiPolygon;
+import org.locationtech.jts.geom.Point;
+import org.locationtech.jts.geom.Polygon;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
+
+/**
+ * A custom Jackson serializer for JTS Geometry objects that translates these objects into their GeoJSON representations.
+ * This class supports serialization for all primary JTS geometry types including Point, LineString, Polygon, and their
+ * respective collections such as MultiPoint, MultiLineString, MultiPolygon, and GeometryCollection.
+ * It handles complex geometries by delegating to specific methods based on the instance type of the geometry object,
+ * ensuring that each geometry type is correctly represented according to the GeoJSON standard.
+ */
+public class GeometrySerializer extends JsonSerializer<Geometry> {
+
+    @Override
+    public void serialize(Geometry value, JsonGenerator jsonGenerator, SerializerProvider provider) throws IOException {
+        writeGeometry(jsonGenerator, value);
+    }
+
+    /**
+     * Writes the geometry object to the JsonGenerator. This method determines the type of the geometry
+     * and calls the appropriate method to handle the serialization.
+     *
+     * @param jsonGenerator the JsonGenerator to use for writing the GeoJSON
+     * @param value the Geometry object to serialize
+     * @throws IOException if an input/output error occurs
+     */
+    public void writeGeometry(JsonGenerator jsonGenerator, Geometry value) throws IOException {
+        // Polygon/Point/MultiPoint are checked before their supertypes' siblings; the order
+        // matters only in that LinearRing (a LineString subtype) falls through to LineString.
+        if (value instanceof Polygon) {
+            writePolygon(jsonGenerator, (Polygon) value);
+
+        } else if (value instanceof Point) {
+            writePoint(jsonGenerator, (Point) value);
+
+        } else if (value instanceof MultiPoint) {
+            writeMultiPoint(jsonGenerator, (MultiPoint) value);
+
+        } else if (value instanceof MultiPolygon) {
+            writeMultiPolygon(jsonGenerator, (MultiPolygon) value);
+
+        } else if (value instanceof LineString) {
+            writeLineString(jsonGenerator, (LineString) value);
+
+        } else if (value instanceof MultiLineString) {
+            writeMultiLineString(jsonGenerator, (MultiLineString) value);
+
+        } else if (value instanceof GeometryCollection) {
+            writeGeometryCollection(jsonGenerator, (GeometryCollection) value);
+
+        } else {
+            // Fixed: the two concatenated literals previously ran together ("GeoJSON.Supported").
+            throw new JsonMappingException(jsonGenerator,
+                    "Geometry type " + value.getClass().getName() + " cannot be serialized as GeoJSON."
+                            + " Supported types are: "
+                            + Arrays.asList(Point.class.getName(), LineString.class.getName(), Polygon.class.getName(),
+                                    MultiPoint.class.getName(), MultiLineString.class.getName(),
+                                    MultiPolygon.class.getName(), GeometryCollection.class.getName()));
+        }
+    }
+
+    /** Writes a GeometryCollection as {"type":"GeometryCollection","geometries":[...]}. */
+    private void writeGeometryCollection(JsonGenerator jsonGenerator, GeometryCollection value) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.GEOMETRY_COLLECTION);
+        jsonGenerator.writeArrayFieldStart(GeoJsonConstants.GEOMETRIES);
+
+        for (int i = 0; i != value.getNumGeometries(); ++i) {
+            writeGeometry(jsonGenerator, value.getGeometryN(i));
+        }
+
+        jsonGenerator.writeEndArray();
+        jsonGenerator.writeEndObject();
+    }
+
+    /** Writes a MultiPoint as {"type":"MultiPoint","coordinates":[[x,y],...]}. */
+    private void writeMultiPoint(JsonGenerator jsonGenerator, MultiPoint value) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.MULTI_POINT);
+        jsonGenerator.writeArrayFieldStart(GeoJsonConstants.COORDINATES);
+
+        for (int i = 0; i != value.getNumGeometries(); ++i) {
+            writePointCoordinates(jsonGenerator, (Point) value.getGeometryN(i));
+        }
+
+        jsonGenerator.writeEndArray();
+        jsonGenerator.writeEndObject();
+    }
+
+    /** Writes a MultiLineString as {"type":"MultiLineString","coordinates":[[[x,y],...],...]}. */
+    private void writeMultiLineString(JsonGenerator jsonGenerator, MultiLineString value) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.MULTI_LINE_STRING);
+        jsonGenerator.writeArrayFieldStart(GeoJsonConstants.COORDINATES);
+
+        for (int i = 0; i != value.getNumGeometries(); ++i) {
+            writeLineStringCoordinates(jsonGenerator, (LineString) value.getGeometryN(i));
+        }
+
+        jsonGenerator.writeEndArray();
+        jsonGenerator.writeEndObject();
+    }
+
+    @Override
+    public Class<Geometry> handledType() {
+        return Geometry.class;
+    }
+
+    /** Writes a MultiPolygon as {"type":"MultiPolygon","coordinates":[[[[x,y],...]],...]}. */
+    private void writeMultiPolygon(JsonGenerator jsonGenerator, MultiPolygon value) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.MULTI_POLYGON);
+        jsonGenerator.writeArrayFieldStart(GeoJsonConstants.COORDINATES);
+
+        for (int i = 0; i != value.getNumGeometries(); ++i) {
+            writePolygonCoordinates(jsonGenerator, (Polygon) value.getGeometryN(i));
+        }
+
+        jsonGenerator.writeEndArray();
+        jsonGenerator.writeEndObject();
+    }
+
+    /** Writes a Polygon as {"type":"Polygon","coordinates":[shell, hole1, ...]}. */
+    private void writePolygon(JsonGenerator jsonGenerator, Polygon value) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.POLYGON);
+        jsonGenerator.writeFieldName(GeoJsonConstants.COORDINATES);
+        writePolygonCoordinates(jsonGenerator, value);
+
+        jsonGenerator.writeEndObject();
+    }
+
+    /** Writes the ring array of a polygon: exterior ring first, then each interior hole. */
+    private void writePolygonCoordinates(JsonGenerator jsonGenerator, Polygon value) throws IOException {
+        jsonGenerator.writeStartArray();
+        writeLineStringCoordinates(jsonGenerator, value.getExteriorRing());
+
+        for (int i = 0; i < value.getNumInteriorRing(); ++i) {
+            writeLineStringCoordinates(jsonGenerator, value.getInteriorRingN(i));
+        }
+        jsonGenerator.writeEndArray();
+    }
+
+    /** Writes a line string's vertices as an array of coordinate arrays. */
+    private void writeLineStringCoordinates(JsonGenerator jsonGenerator, LineString ring) throws IOException {
+        jsonGenerator.writeStartArray();
+        for (int i = 0; i != ring.getNumPoints(); ++i) {
+            Point p = ring.getPointN(i);
+            writePointCoordinates(jsonGenerator, p);
+        }
+        jsonGenerator.writeEndArray();
+    }
+
+    /** Writes a LineString as {"type":"LineString","coordinates":[[x,y],...]}. */
+    private void writeLineString(JsonGenerator jsonGenerator, LineString lineString) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.LINE_STRING);
+        jsonGenerator.writeFieldName(GeoJsonConstants.COORDINATES);
+        writeLineStringCoordinates(jsonGenerator, lineString);
+        jsonGenerator.writeEndObject();
+    }
+
+    /** Writes a Point as {"type":"Point","coordinates":[x,y]}. */
+    private void writePoint(JsonGenerator jsonGenerator, Point p) throws IOException {
+        jsonGenerator.writeStartObject();
+        jsonGenerator.writeStringField(GeoJsonConstants.TYPE, GeoJsonConstants.POINT);
+        jsonGenerator.writeFieldName(GeoJsonConstants.COORDINATES);
+        writePointCoordinates(jsonGenerator, p);
+        jsonGenerator.writeEndObject();
+    }
+
+    /**
+     * Writes a point's coordinate array [x, y], appending z when present (non-NaN) and
+     * the measure value m when the coordinate is an XYZM coordinate.
+     */
+    private void writePointCoordinates(JsonGenerator jsonGenerator, Point p) throws IOException {
+        jsonGenerator.writeStartArray();
+
+        writeFormattedNumber(jsonGenerator, p.getCoordinate().x);
+        writeFormattedNumber(jsonGenerator, p.getCoordinate().y);
+
+        // JTS uses NaN for "no z"; only emit the third element for true 3D coordinates.
+        if (!Double.isNaN(p.getCoordinate().z)) {
+            writeFormattedNumber(jsonGenerator, p.getCoordinate().z);
+        }
+
+        if (p.getCoordinate() instanceof CoordinateXYZM) {
+            double m = p.getCoordinate().getM();
+            writeFormattedNumber(jsonGenerator, m);
+        }
+        jsonGenerator.writeEndArray();
+    }
+
+    /**
+     * Writes a number, rendering whole values as integers (5 instead of 5.0) for compact GeoJSON.
+     * Fixed: the whole-value branch now casts to long rather than int, so whole coordinates
+     * outside the int range are no longer silently clamped to Integer.MAX/MIN_VALUE.
+     */
+    private void writeFormattedNumber(JsonGenerator jsonGenerator, double value) throws IOException {
+        if ((value == Math.floor(value)) && !Double.isInfinite(value)) {
+            jsonGenerator.writeNumber((long) value);
+        } else {
+            jsonGenerator.writeNumber(value);
+        }
+    }
+
+}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AGeometry.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AGeometry.java
index 84a2e81..c9d66b4 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AGeometry.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AGeometry.java
@@ -20,22 +20,23 @@
import java.io.IOException;
+import org.apache.asterix.dataflow.data.nontagged.serde.jacksonjts.JtsModule;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;
+import org.locationtech.jts.geom.Geometry;
-import com.esri.core.geometry.ogc.OGCGeometry;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
public class AGeometry implements IAObject {
- protected OGCGeometry geometry;
+ protected Geometry geometry;
- public AGeometry(OGCGeometry geometry) {
+ public AGeometry(Geometry geometry) {
this.geometry = geometry;
}
- public OGCGeometry getGeometry() {
+ public Geometry getGeometry() {
return geometry;
}
@@ -67,11 +68,13 @@
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
- ObjectNode json = null;
+ om.registerModule(new JtsModule());
+ ObjectNode json;
try {
- json = (ObjectNode) om.readTree(geometry.asGeoJson());
+ String geoJson = om.writeValueAsString(geometry);
+ json = (ObjectNode) om.readTree(geoJson);
} catch (IOException e) {
- throw new RuntimeException(e);
+ return om.createObjectNode();
}
return json;
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableGeometry.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableGeometry.java
index 346d68a..925a0c9 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableGeometry.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableGeometry.java
@@ -18,29 +18,32 @@
*/
package org.apache.asterix.om.base;
-import com.esri.core.geometry.OGCStructure;
-import com.esri.core.geometry.OperatorImportFromWkt;
-import com.esri.core.geometry.SpatialReference;
-import com.esri.core.geometry.WktImportFlags;
-import com.esri.core.geometry.ogc.OGCGeometry;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.io.ParseException;
+import org.locationtech.jts.io.WKTReader;
public class AMutableGeometry extends AGeometry {
+ private Geometry geometry;
+ private final WKTReader wktReader = new WKTReader();
- private OperatorImportFromWkt wktImporter;
-
- public AMutableGeometry(OGCGeometry geom) {
+ public AMutableGeometry(Geometry geom) {
super(geom);
- wktImporter = OperatorImportFromWkt.local();
- }
-
- public void setValue(OGCGeometry geom) {
this.geometry = geom;
}
- public void parseWKT(String wkt) {
- OGCStructure structure;
+ public void setValue(Geometry geom) {
+ this.geometry = geom;
+ }
- structure = wktImporter.executeOGC(WktImportFlags.wktImportNonTrusted, wkt, null);
- this.geometry = OGCGeometry.createFromOGCStructure(structure, SpatialReference.create(4326));
+ public Geometry getGeometry() {
+ return this.geometry;
+ }
+
+ public void parseWKT(String wkt) {
+ try {
+ this.geometry = wktReader.read(wkt);
+ } catch (ParseException e) {
+ throw new RuntimeException(e);
+ }
}
}
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/AbstractFieldNamesDictionary.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/AbstractFieldNamesDictionary.java
similarity index 96%
rename from asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/AbstractFieldNamesDictionary.java
rename to asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/AbstractFieldNamesDictionary.java
index bffdb33..b2cd223 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/AbstractFieldNamesDictionary.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/AbstractFieldNamesDictionary.java
@@ -16,14 +16,12 @@
* specific language governing permissions and limitations
* under the License.
*/
-
-package org.apache.asterix.column.metadata.dictionary;
+package org.apache.asterix.om.dictionary;
import java.io.DataInput;
import java.io.IOException;
import java.util.List;
-import org.apache.asterix.column.metadata.IFieldNamesDictionary;
import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
import org.apache.asterix.om.base.AMutableString;
import org.apache.hyracks.api.exceptions.HyracksDataException;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/ByteToNodeMap.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/ByteToNodeMap.java
similarity index 97%
rename from asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/ByteToNodeMap.java
rename to asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/ByteToNodeMap.java
index 73c034b..7bfae0b 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/ByteToNodeMap.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/ByteToNodeMap.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.apache.asterix.column.metadata.dictionary;
+package org.apache.asterix.om.dictionary;
import java.io.DataInput;
import java.io.DataOutput;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNameTrie.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNameTrie.java
similarity index 98%
rename from asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNameTrie.java
rename to asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNameTrie.java
index 4a19cd6..5333af5 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNameTrie.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNameTrie.java
@@ -16,9 +16,9 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.apache.asterix.column.metadata.dictionary;
+package org.apache.asterix.om.dictionary;
-import static org.apache.asterix.column.metadata.dictionary.AbstractFieldNamesDictionary.deserializeFieldNames;
+import static org.apache.asterix.om.dictionary.AbstractFieldNamesDictionary.deserializeFieldNames;
import java.io.DataInput;
import java.io.DataOutput;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNamesHashDictionary.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNamesHashDictionary.java
similarity index 98%
rename from asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNamesHashDictionary.java
rename to asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNamesHashDictionary.java
index 73c9a73..591f322 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNamesHashDictionary.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNamesHashDictionary.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.apache.asterix.column.metadata.dictionary;
+package org.apache.asterix.om.dictionary;
import java.io.DataInput;
import java.io.DataInputStream;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNamesTrieDictionary.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNamesTrieDictionary.java
similarity index 97%
rename from asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNamesTrieDictionary.java
rename to asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNamesTrieDictionary.java
index 10de829..30e10d6 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/FieldNamesTrieDictionary.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/FieldNamesTrieDictionary.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.apache.asterix.column.metadata.dictionary;
+package org.apache.asterix.om.dictionary;
import java.io.DataInput;
import java.io.DataInputStream;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/IFieldNamesDictionary.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/IFieldNamesDictionary.java
similarity index 97%
rename from asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/IFieldNamesDictionary.java
rename to asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/IFieldNamesDictionary.java
index 8aa0e88..ec494fa 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/IFieldNamesDictionary.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/IFieldNamesDictionary.java
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.apache.asterix.column.metadata;
+package org.apache.asterix.om.dictionary;
import java.io.DataInputStream;
import java.io.DataOutput;
diff --git a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/TrieNode.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/TrieNode.java
similarity index 98%
rename from asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/TrieNode.java
rename to asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/TrieNode.java
index 32e902b..9ea2978 100644
--- a/asterixdb/asterix-column/src/main/java/org/apache/asterix/column/metadata/dictionary/TrieNode.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/dictionary/TrieNode.java
@@ -16,8 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-
-package org.apache.asterix.column.metadata.dictionary;
+package org.apache.asterix.om.dictionary;
import java.io.DataInput;
import java.io.DataOutput;
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java
index 57b842a..27f2160 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java
@@ -82,6 +82,7 @@
import org.apache.asterix.om.typecomputer.impl.CollectionToSequenceTypeComputer;
import org.apache.asterix.om.typecomputer.impl.ConcatNonNullTypeComputer;
import org.apache.asterix.om.typecomputer.impl.ConcatTypeComputer;
+import org.apache.asterix.om.typecomputer.impl.CountNTypeComputer;
import org.apache.asterix.om.typecomputer.impl.DoubleIfTypeComputer;
import org.apache.asterix.om.typecomputer.impl.FieldAccessByIndexResultType;
import org.apache.asterix.om.typecomputer.impl.FieldAccessByNameResultType;
@@ -417,6 +418,7 @@
public static final FunctionIdentifier LISTIFY = FunctionConstants.newAsterix("listify", 1);
public static final FunctionIdentifier AVG = FunctionConstants.newAsterix("agg-avg", 1);
public static final FunctionIdentifier COUNT = FunctionConstants.newAsterix("agg-count", 1);
+ public static final FunctionIdentifier COUNTN = FunctionConstants.newAsterix("agg-countn", 1);
public static final FunctionIdentifier SUM = FunctionConstants.newAsterix("agg-sum", 1);
public static final FunctionIdentifier LOCAL_SUM = FunctionConstants.newAsterix("agg-local-sum", 1);
public static final FunctionIdentifier INTERMEDIATE_SUM = FunctionConstants.newAsterix("agg-intermediate-sum", 1);
@@ -482,6 +484,7 @@
public static final FunctionIdentifier SCALAR_ARRAYAGG = FunctionConstants.newAsterix("arrayagg", 1);
public static final FunctionIdentifier SCALAR_AVG = FunctionConstants.newAsterix("avg", 1);
public static final FunctionIdentifier SCALAR_COUNT = FunctionConstants.newAsterix("count", 1);
+ public static final FunctionIdentifier SCALAR_COUNTN = FunctionConstants.newAsterix("countn", 1);
public static final FunctionIdentifier SCALAR_SUM = FunctionConstants.newAsterix("sum", 1);
public static final FunctionIdentifier SCALAR_MAX = FunctionConstants.newAsterix("max", 1);
public static final FunctionIdentifier SCALAR_MIN = FunctionConstants.newAsterix("min", 1);
@@ -501,6 +504,7 @@
// serializable aggregate functions
public static final FunctionIdentifier SERIAL_AVG = FunctionConstants.newAsterix("avg-serial", 1);
public static final FunctionIdentifier SERIAL_COUNT = FunctionConstants.newAsterix("count-serial", 1);
+ public static final FunctionIdentifier SERIAL_COUNTN = FunctionConstants.newAsterix("countn-serial", 1);
public static final FunctionIdentifier SERIAL_SUM = FunctionConstants.newAsterix("sum-serial", 1);
public static final FunctionIdentifier SERIAL_LOCAL_SUM = FunctionConstants.newAsterix("local-sum-serial", 1);
public static final FunctionIdentifier SERIAL_INTERMEDIATE_SUM =
@@ -559,6 +563,8 @@
FunctionConstants.newAsterix("arrayagg-distinct", 1);
public static final FunctionIdentifier COUNT_DISTINCT = FunctionConstants.newAsterix("agg-count-distinct", 1);
public static final FunctionIdentifier SCALAR_COUNT_DISTINCT = FunctionConstants.newAsterix("count-distinct", 1);
+ public static final FunctionIdentifier COUNTN_DISTINCT = FunctionConstants.newAsterix("agg-countn-distinct", 1);
+ public static final FunctionIdentifier SCALAR_COUNTN_DISTINCT = FunctionConstants.newAsterix("countn-distinct", 1);
public static final FunctionIdentifier SUM_DISTINCT = FunctionConstants.newAsterix("agg-sum-distinct", 1);
public static final FunctionIdentifier SCALAR_SUM_DISTINCT = FunctionConstants.newAsterix("sum-distinct", 1);
public static final FunctionIdentifier AVG_DISTINCT = FunctionConstants.newAsterix("agg-avg-distinct", 1);
@@ -593,6 +599,7 @@
public static final FunctionIdentifier INTERMEDIATE_SQL_AVG =
FunctionConstants.newAsterix("intermediate-agg-sql-avg", 1);
public static final FunctionIdentifier SQL_COUNT = FunctionConstants.newAsterix("agg-sql-count", 1);
+ public static final FunctionIdentifier SQL_COUNTN = FunctionConstants.newAsterix("agg-sql-countn", 1);
public static final FunctionIdentifier SQL_SUM = FunctionConstants.newAsterix("agg-sql-sum", 1);
public static final FunctionIdentifier LOCAL_SQL_SUM = FunctionConstants.newAsterix("agg-local-sql-sum", 1);
public static final FunctionIdentifier INTERMEDIATE_SQL_SUM =
@@ -666,6 +673,7 @@
public static final FunctionIdentifier SCALAR_SQL_AVG = FunctionConstants.newAsterix("sql-avg", 1);
public static final FunctionIdentifier SCALAR_SQL_COUNT = FunctionConstants.newAsterix("sql-count", 1);
+ public static final FunctionIdentifier SCALAR_SQL_COUNTN = FunctionConstants.newAsterix("sql-countn", 1);
public static final FunctionIdentifier SCALAR_SQL_SUM = FunctionConstants.newAsterix("sql-sum", 1);
public static final FunctionIdentifier SCALAR_SQL_MAX = FunctionConstants.newAsterix("sql-max", 1);
public static final FunctionIdentifier SCALAR_SQL_MIN = FunctionConstants.newAsterix("sql-min", 1);
@@ -681,6 +689,7 @@
// serializable sql aggregate functions
public static final FunctionIdentifier SERIAL_SQL_AVG = FunctionConstants.newAsterix("sql-avg-serial", 1);
public static final FunctionIdentifier SERIAL_SQL_COUNT = FunctionConstants.newAsterix("sql-count-serial", 1);
+ public static final FunctionIdentifier SERIAL_SQL_COUNTN = FunctionConstants.newAsterix("sql-countn-serial", 1);
public static final FunctionIdentifier SERIAL_SQL_SUM = FunctionConstants.newAsterix("sql-sum-serial", 1);
public static final FunctionIdentifier SERIAL_LOCAL_SQL_SUM =
FunctionConstants.newAsterix("local-sql-sum-serial", 1);
@@ -744,6 +753,10 @@
FunctionConstants.newAsterix("agg-sql-count-distinct", 1);
public static final FunctionIdentifier SCALAR_SQL_COUNT_DISTINCT =
FunctionConstants.newAsterix("sql-count-distinct", 1);
+ public static final FunctionIdentifier SQL_COUNTN_DISTINCT =
+ FunctionConstants.newAsterix("agg-sql-countn-distinct", 1);
+ public static final FunctionIdentifier SCALAR_SQL_COUNTN_DISTINCT =
+ FunctionConstants.newAsterix("sql-countn-distinct", 1);
public static final FunctionIdentifier SQL_SUM_DISTINCT = FunctionConstants.newAsterix("agg-sql-sum-distinct", 1);
public static final FunctionIdentifier SCALAR_SQL_SUM_DISTINCT =
FunctionConstants.newAsterix("sql-sum-distinct", 1);
@@ -1575,6 +1588,7 @@
addPrivateFunction(GLOBAL_MIN, MinMaxAggTypeComputer.INSTANCE, true);
addPrivateFunction(NON_EMPTY_STREAM, ABooleanTypeComputer.INSTANCE, true);
addFunction(COUNT, AInt64TypeComputer.INSTANCE, true);
+ addFunction(COUNTN, CountNTypeComputer.INSTANCE, true);
addPrivateFunction(LOCAL_AVG, LocalAvgTypeComputer.INSTANCE, true);
addFunction(AVG, NullableDoubleTypeComputer.INSTANCE, true);
addPrivateFunction(GLOBAL_AVG, NullableDoubleTypeComputer.INSTANCE, true);
@@ -1623,11 +1637,13 @@
addPrivateFunction(SERIAL_SQL_AVG, NullableDoubleTypeComputer.INSTANCE, true);
addPrivateFunction(SERIAL_SQL_COUNT, AInt64TypeComputer.INSTANCE, true);
+ addPrivateFunction(SERIAL_SQL_COUNTN, CountNTypeComputer.INSTANCE, true);
addPrivateFunction(SERIAL_GLOBAL_SQL_AVG, NullableDoubleTypeComputer.INSTANCE, true);
addPrivateFunction(SERIAL_LOCAL_SQL_AVG, LocalAvgTypeComputer.INSTANCE, true);
addPrivateFunction(SERIAL_INTERMEDIATE_SQL_AVG, LocalAvgTypeComputer.INSTANCE, true);
addFunction(SCALAR_AVG, NullableDoubleTypeComputer.INSTANCE, true);
addFunction(SCALAR_COUNT, AInt64TypeComputer.INSTANCE, true);
+ addFunction(SCALAR_COUNTN, CountNTypeComputer.INSTANCE, true);
addFunction(SCALAR_MAX, scalarMinMaxTypeComputer, true);
addFunction(SCALAR_MIN, scalarMinMaxTypeComputer, true);
addPrivateFunction(INTERMEDIATE_AVG, LocalAvgTypeComputer.INSTANCE, true);
@@ -1685,6 +1701,7 @@
addPrivateFunction(LOCAL_SQL_AVG, LocalAvgTypeComputer.INSTANCE, true);
addPrivateFunction(INTERMEDIATE_SQL_AVG, LocalAvgTypeComputer.INSTANCE, true);
addFunction(SQL_COUNT, AInt64TypeComputer.INSTANCE, true);
+ addFunction(SQL_COUNTN, CountNTypeComputer.INSTANCE, true);
addFunction(SQL_MAX, MinMaxAggTypeComputer.INSTANCE, true);
addPrivateFunction(LOCAL_SQL_MAX, MinMaxAggTypeComputer.INSTANCE, true);
addPrivateFunction(INTERMEDIATE_SQL_MAX, MinMaxAggTypeComputer.INSTANCE, true);
@@ -1695,6 +1712,7 @@
addPrivateFunction(GLOBAL_SQL_MIN, MinMaxAggTypeComputer.INSTANCE, true);
addFunction(SCALAR_SQL_AVG, NullableDoubleTypeComputer.INSTANCE, true);
addFunction(SCALAR_SQL_COUNT, AInt64TypeComputer.INSTANCE, true);
+ addFunction(SCALAR_SQL_COUNTN, CountNTypeComputer.INSTANCE, true);
addFunction(SCALAR_SQL_MAX, scalarMinMaxTypeComputer, true);
addFunction(SCALAR_SQL_MIN, scalarMinMaxTypeComputer, true);
addPrivateFunction(INTERMEDIATE_SQL_AVG, LocalAvgTypeComputer.INSTANCE, true);
@@ -1743,6 +1761,7 @@
addPrivateFunction(SERIAL_AVG, NullableDoubleTypeComputer.INSTANCE, true);
addPrivateFunction(SERIAL_COUNT, AInt64TypeComputer.INSTANCE, true);
+ addPrivateFunction(SERIAL_COUNTN, CountNTypeComputer.INSTANCE, true);
addPrivateFunction(SERIAL_GLOBAL_AVG, NullableDoubleTypeComputer.INSTANCE, true);
addPrivateFunction(SERIAL_LOCAL_AVG, LocalAvgTypeComputer.INSTANCE, true);
addPrivateFunction(SERIAL_INTERMEDIATE_AVG, LocalAvgTypeComputer.INSTANCE, true);
@@ -1782,6 +1801,11 @@
addFunction(SQL_COUNT_DISTINCT, AInt64TypeComputer.INSTANCE, true);
addFunction(SCALAR_SQL_COUNT_DISTINCT, AInt64TypeComputer.INSTANCE, true);
+ addFunction(COUNTN_DISTINCT, CountNTypeComputer.INSTANCE, true);
+ addFunction(SCALAR_COUNTN_DISTINCT, CountNTypeComputer.INSTANCE, true);
+ addFunction(SQL_COUNTN_DISTINCT, CountNTypeComputer.INSTANCE, true);
+ addFunction(SCALAR_SQL_COUNTN_DISTINCT, CountNTypeComputer.INSTANCE, true);
+
addFunction(SUM_DISTINCT, NumericSumAggTypeComputer.INSTANCE, true);
addFunction(SCALAR_SUM_DISTINCT, scalarNumericSumTypeComputer, true);
addFunction(SQL_SUM_DISTINCT, NumericSumAggTypeComputer.INSTANCE, true);
@@ -2208,6 +2232,26 @@
addDistinctAgg(COUNT_DISTINCT, COUNT);
addScalarAgg(COUNT_DISTINCT, SCALAR_COUNT_DISTINCT);
+ // COUNTN
+
+ addAgg(COUNTN);
+ addLocalAgg(COUNTN, COUNTN);
+ addIntermediateAgg(COUNTN, SUM);
+ addGlobalAgg(COUNTN, SUM);
+
+ addScalarAgg(COUNTN, SCALAR_COUNTN);
+
+ addSerialAgg(COUNTN, SERIAL_COUNTN);
+ addAgg(SERIAL_COUNTN);
+ addLocalAgg(SERIAL_COUNTN, SERIAL_COUNTN);
+ addIntermediateAgg(SERIAL_COUNTN, SERIAL_SUM);
+ addGlobalAgg(SERIAL_COUNTN, SERIAL_SUM);
+
+ // COUNTN DISTINCT
+
+ addDistinctAgg(COUNTN_DISTINCT, COUNTN);
+ addScalarAgg(COUNTN_DISTINCT, SCALAR_COUNTN_DISTINCT);
+
// MAX
addAgg(MAX);
addAgg(LOCAL_MAX);
@@ -2719,6 +2763,26 @@
addDistinctAgg(SQL_COUNT_DISTINCT, SQL_COUNT);
addScalarAgg(SQL_COUNT_DISTINCT, SCALAR_SQL_COUNT_DISTINCT);
+ // SQL COUNTN
+
+ addAgg(SQL_COUNTN);
+ addLocalAgg(SQL_COUNTN, SQL_COUNTN);
+ addIntermediateAgg(SQL_COUNTN, SQL_SUM);
+ addGlobalAgg(SQL_COUNTN, SQL_SUM);
+
+ addScalarAgg(SQL_COUNTN, SCALAR_SQL_COUNTN);
+
+ addSerialAgg(SQL_COUNTN, SERIAL_SQL_COUNTN);
+ addAgg(SERIAL_SQL_COUNTN);
+ addLocalAgg(SERIAL_SQL_COUNTN, SERIAL_SQL_COUNTN);
+ addIntermediateAgg(SERIAL_SQL_COUNTN, SERIAL_SQL_SUM);
+ addGlobalAgg(SERIAL_SQL_COUNTN, SERIAL_SQL_SUM);
+
+ // SQL COUNTN DISTINCT
+
+ addDistinctAgg(SQL_COUNTN_DISTINCT, SQL_COUNTN);
+ addScalarAgg(SQL_COUNTN_DISTINCT, SCALAR_SQL_COUNTN_DISTINCT);
+
// SQL MAX
addAgg(SQL_MAX);
addAgg(LOCAL_SQL_MAX);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/CountNTypeComputer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/CountNTypeComputer.java
new file mode 100644
index 0000000..4178315
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/CountNTypeComputer.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.om.typecomputer.impl;
+
+import org.apache.asterix.om.typecomputer.base.AbstractResultTypeComputer;
+import org.apache.asterix.om.types.AUnionType;
+import org.apache.asterix.om.types.BuiltinType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+
+/**
+ * Type computer for the COUNTN aggregates: the result is a nullable BIGINT (AINT64),
+ * since COUNTN may return NULL (e.g. when a NULL or MISSING item is encountered).
+ */
+public class CountNTypeComputer extends AbstractResultTypeComputer {
+
+    public static final CountNTypeComputer INSTANCE = new CountNTypeComputer();
+
+    private CountNTypeComputer() {
+    }
+
+    @Override
+    protected IAType getResultType(ILogicalExpression expr, IAType... strippedInputTypes) throws AlgebricksException {
+        return AUnionType.createNullableType(BuiltinType.AINT64);
+    }
+}
diff --git a/asterixdb/asterix-runtime/pom.xml b/asterixdb/asterix-runtime/pom.xml
index 138c4dd..d7a71bf 100644
--- a/asterixdb/asterix-runtime/pom.xml
+++ b/asterixdb/asterix-runtime/pom.xml
@@ -151,8 +151,8 @@
<artifactId>commons-collections4</artifactId>
</dependency>
<dependency>
- <groupId>com.esri.geometry</groupId>
- <artifactId>esri-geometry-api</artifactId>
+ <groupId>org.locationtech.jts</groupId>
+ <artifactId>jts-core</artifactId>
</dependency>
</dependencies>
</project>
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarCountNAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarCountNAggregateDescriptor.java
new file mode 100644
index 0000000..fb2fbc8
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarCountNAggregateDescriptor.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.scalar;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.std.CountNAggregateDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+/**
+ * Scalar version of COUNTN: counts numeric items; returns NULL if MISSING or NULL is encountered.
+ */
+public class ScalarCountNAggregateDescriptor extends AbstractScalarAggregateDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final FunctionIdentifier FID = BuiltinFunctions.SCALAR_COUNTN;
+
+    public static final IFunctionDescriptorFactory FACTORY = ScalarCountNAggregateDescriptor::new;
+
+    private ScalarCountNAggregateDescriptor() {
+        super(CountNAggregateDescriptor.FACTORY);
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarCountNDistinctAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarCountNDistinctAggregateDescriptor.java
new file mode 100644
index 0000000..6f4cf3a
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarCountNDistinctAggregateDescriptor.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.runtime.aggregates.scalar;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.std.CountNAggregateDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+/**
+ * Scalar version of COUNTN DISTINCT: counts distinct numeric items; returns NULL if MISSING or NULL is encountered.
+ */
+public class ScalarCountNDistinctAggregateDescriptor extends AbstractScalarDistinctAggregateDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final FunctionIdentifier FID = BuiltinFunctions.SCALAR_COUNTN_DISTINCT;
+
+    public static final IFunctionDescriptorFactory FACTORY =
+            createDescriptorFactory(ScalarCountNDistinctAggregateDescriptor::new);
+
+    private ScalarCountNDistinctAggregateDescriptor() {
+        super(CountNAggregateDescriptor.FACTORY);
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarSqlCountNAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarSqlCountNAggregateDescriptor.java
new file mode 100644
index 0000000..c0925d4
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarSqlCountNAggregateDescriptor.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.scalar;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.std.SqlCountNAggregateDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+/**
+ * Scalar version of SQL COUNTN: counts numeric items; NULLs and MISSINGs are ignored.
+ */
+public class ScalarSqlCountNAggregateDescriptor extends AbstractScalarAggregateDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final FunctionIdentifier FID = BuiltinFunctions.SCALAR_SQL_COUNTN;
+
+    public static final IFunctionDescriptorFactory FACTORY = ScalarSqlCountNAggregateDescriptor::new;
+
+    private ScalarSqlCountNAggregateDescriptor() {
+        super(SqlCountNAggregateDescriptor.FACTORY);
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarSqlCountNDistinctAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarSqlCountNDistinctAggregateDescriptor.java
new file mode 100644
index 0000000..a163b8f
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/ScalarSqlCountNDistinctAggregateDescriptor.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.runtime.aggregates.scalar;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.std.SqlCountNAggregateDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+/**
+ * Scalar version of SQL COUNTN DISTINCT: counts distinct numeric items; NULLs and MISSINGs are ignored.
+ */
+public class ScalarSqlCountNDistinctAggregateDescriptor extends AbstractScalarDistinctAggregateDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final FunctionIdentifier FID = BuiltinFunctions.SCALAR_SQL_COUNTN_DISTINCT;
+
+    public static final IFunctionDescriptorFactory FACTORY =
+            createDescriptorFactory(ScalarSqlCountNDistinctAggregateDescriptor::new);
+
+    private ScalarSqlCountNDistinctAggregateDescriptor() {
+        super(SqlCountNAggregateDescriptor.FACTORY);
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableCountAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableCountAggregateFunction.java
index 03dacd2..1e611b9 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableCountAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableCountAggregateFunction.java
@@ -81,7 +81,7 @@
if (typeTag == ATypeTag.MISSING || typeTag == ATypeTag.NULL) {
processNull(state, start);
} else {
- cnt++;
+ cnt = processValue(typeTag, cnt);
}
BufferSerDeUtil.writeBoolean(metNull, state, start + MET_NULL_OFFSET);
BufferSerDeUtil.writeLong(cnt, state, start + COUNT_OFFSET);
@@ -111,4 +111,8 @@
protected void processNull(byte[] state, int start) {
BufferSerDeUtil.writeBoolean(true, state, start + MET_NULL_OFFSET);
}
+
+ protected long processValue(ATypeTag tag, long cnt) {
+ return cnt + 1;
+ }
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableCountNAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableCountNAggregateDescriptor.java
new file mode 100644
index 0000000..113c514
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableCountNAggregateDescriptor.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.serializable.std;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.base.AbstractSerializableAggregateFunctionDynamicDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.algebricks.runtime.base.ISerializedAggregateEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.ISerializedAggregateEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Counts the number of numeric items. Returns NULL if MISSING or NULL is encountered.
+ */
+public class SerializableCountNAggregateDescriptor extends AbstractSerializableAggregateFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public static final IFunctionDescriptorFactory FACTORY = SerializableCountNAggregateDescriptor::new;
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return BuiltinFunctions.SERIAL_COUNTN;
+ }
+
+ @Override
+ public ISerializedAggregateEvaluatorFactory createSerializableAggregateEvaluatorFactory(
+ final IScalarEvaluatorFactory[] args) {
+ return new ISerializedAggregateEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ISerializedAggregateEvaluator createAggregateEvaluator(IEvaluatorContext ctx)
+ throws HyracksDataException {
+ return new SerializableCountNAggregateFunction(args, ctx, sourceLoc);
+ }
+ };
+ }
+
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableCountNAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableCountNAggregateFunction.java
new file mode 100644
index 0000000..020d367
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableCountNAggregateFunction.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.serializable.std;
+
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.SourceLocation;
+
+/**
+ * COUNTN returns the number of numeric items in the given list. Returns NULL if MISSING or NULL is encountered.
+ */
+public class SerializableCountNAggregateFunction extends AbstractSerializableCountAggregateFunction {
+ public SerializableCountNAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context,
+ SourceLocation sourceLoc) throws HyracksDataException {
+ super(args, context, sourceLoc);
+ }
+
+ @Override
+ protected long processValue(ATypeTag typeTag, long cnt) {
+ if (ATypeHierarchy.getTypeDomain(typeTag) == ATypeHierarchy.Domain.NUMERIC) {
+ return cnt + 1;
+ }
+ return cnt;
+ }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlCountNAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlCountNAggregateDescriptor.java
new file mode 100644
index 0000000..38c5150
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlCountNAggregateDescriptor.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.serializable.std;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.base.AbstractSerializableAggregateFunctionDynamicDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.algebricks.runtime.base.ISerializedAggregateEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.ISerializedAggregateEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Counts the number of numeric items. NULLs and MISSINGs are ignored.
+ */
+public class SerializableSqlCountNAggregateDescriptor extends AbstractSerializableAggregateFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public static final IFunctionDescriptorFactory FACTORY = SerializableSqlCountNAggregateDescriptor::new;
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return BuiltinFunctions.SERIAL_SQL_COUNTN;
+ }
+
+ @Override
+ public ISerializedAggregateEvaluatorFactory createSerializableAggregateEvaluatorFactory(
+ final IScalarEvaluatorFactory[] args) {
+ return new ISerializedAggregateEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ISerializedAggregateEvaluator createAggregateEvaluator(IEvaluatorContext ctx)
+ throws HyracksDataException {
+ return new SerializableSqlCountNAggregateFunction(args, ctx, sourceLoc);
+ }
+ };
+ }
+
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlCountNAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlCountNAggregateFunction.java
new file mode 100644
index 0000000..41a1df3
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlCountNAggregateFunction.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.serializable.std;
+
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.SourceLocation;
+
+/**
+ * COUNTN returns the number of numeric items in the given list. NULLs and MISSINGs are ignored.
+ */
+public class SerializableSqlCountNAggregateFunction extends AbstractSerializableCountAggregateFunction {
+    public SerializableSqlCountNAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context,
+            SourceLocation sourceLoc) throws HyracksDataException {
+        super(args, context, sourceLoc);
+    }
+
+    @Override
+    protected void processNull(byte[] state, int start) {
+        // Intentionally empty: SQL semantics ignore NULL/MISSING, so the met-null flag is never set.
+    }
+
+    @Override
+    protected long processValue(ATypeTag typeTag, long cnt) {
+        if (ATypeHierarchy.getTypeDomain(typeTag) == ATypeHierarchy.Domain.NUMERIC) {
+            return cnt + 1;
+        }
+        return cnt;
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractCountAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractCountAggregateFunction.java
index 5756f00..1296d82 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractCountAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractCountAggregateFunction.java
@@ -23,6 +23,7 @@
import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.om.base.AInt64;
import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.asterix.om.base.ANull;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.EnumDeserializer;
@@ -45,9 +46,13 @@
@SuppressWarnings("unchecked")
private ISerializerDeserializer<AInt64> int64Serde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);
private IPointable inputVal = new VoidPointable();
private IScalarEvaluator eval;
protected long cnt;
+ protected boolean nullRes;
private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
@@ -60,6 +65,7 @@
@Override
public void init() throws HyracksDataException {
cnt = 0;
+ nullRes = false;
}
@Override
@@ -71,7 +77,7 @@
if (typeTag == ATypeTag.NULL || typeTag == ATypeTag.MISSING) {
processNull();
} else if (typeTag != ATypeTag.SYSTEM_NULL) {
- cnt++;
+ processValue(typeTag);
}
}
@@ -79,8 +85,12 @@
public void finish(IPointable resultPointable) throws HyracksDataException {
resultStorage.reset();
try {
- result.setValue(cnt);
- int64Serde.serialize(result, resultStorage.getDataOutput());
+ if (nullRes) {
+ nullSerde.serialize(ANull.NULL, resultStorage.getDataOutput());
+ } else {
+ result.setValue(cnt);
+ int64Serde.serialize(result, resultStorage.getDataOutput());
+ }
} catch (IOException e) {
throw HyracksDataException.create(e);
}
@@ -93,4 +103,8 @@
}
protected abstract void processNull();
+
+ protected void processValue(ATypeTag tag) {
+ cnt++;
+ }
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/CountNAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/CountNAggregateDescriptor.java
new file mode 100644
index 0000000..97cd1ad
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/CountNAggregateDescriptor.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.std;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.base.AbstractAggregateFunctionDynamicDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluatorFactory;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Counts the number of numeric items. Returns NULL if MISSING or NULL is encountered.
+ */
+public class CountNAggregateDescriptor extends AbstractAggregateFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public static final IFunctionDescriptorFactory FACTORY = CountNAggregateDescriptor::new;
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return BuiltinFunctions.COUNTN;
+ }
+
+ @Override
+ public IAggregateEvaluatorFactory createAggregateEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
+ return new IAggregateEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public IAggregateEvaluator createAggregateEvaluator(IEvaluatorContext ctx) throws HyracksDataException {
+ return new CountNAggregateFunction(args, ctx, sourceLoc);
+ }
+ };
+ }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/CountNAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/CountNAggregateFunction.java
new file mode 100644
index 0000000..c2bcb5f
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/CountNAggregateFunction.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.std;
+
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.SourceLocation;
+
+/**
+ * COUNTN aggregate function: counts only values whose type domain is NUMERIC; processNull sets nullRes so a NULL or MISSING input turns the final result into NULL.
+ */
+public class CountNAggregateFunction extends AbstractCountAggregateFunction {
+
+    public CountNAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context, SourceLocation sourceLoc)
+            throws HyracksDataException {
+        super(args, context, sourceLoc);
+    }
+
+    @Override
+    protected void processNull() {
+        nullRes = true;
+    }
+
+    @Override
+    protected void processValue(ATypeTag tag) {
+        if (ATypeHierarchy.getTypeDomain(tag) == ATypeHierarchy.Domain.NUMERIC) {
+            cnt++;
+        }
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlCountNAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlCountNAggregateDescriptor.java
new file mode 100644
index 0000000..d68d0bb
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlCountNAggregateDescriptor.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.std;
+
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.runtime.aggregates.base.AbstractAggregateFunctionDynamicDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluatorFactory;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Descriptor for the SQL-dialect COUNTN aggregate: counts NUMERIC items while silently skipping NULL and MISSING values.
+ */
+public class SqlCountNAggregateDescriptor extends AbstractAggregateFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public static final IFunctionDescriptorFactory FACTORY = SqlCountNAggregateDescriptor::new;
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return BuiltinFunctions.SQL_COUNTN;
+    }
+
+    @Override
+    public IAggregateEvaluatorFactory createAggregateEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
+        return new IAggregateEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public IAggregateEvaluator createAggregateEvaluator(IEvaluatorContext ctx) throws HyracksDataException {
+                return new SqlCountNAggregateFunction(args, ctx, sourceLoc);
+            }
+        };
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlCountNAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlCountNAggregateFunction.java
new file mode 100644
index 0000000..db677ab
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlCountNAggregateFunction.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.aggregates.std;
+
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.SourceLocation;
+
+/**
+ * SQL COUNTN aggregate function: increments the count only for NUMERIC-domain values; processNull is a no-op, so NULL and MISSING inputs are ignored.
+ */
+public class SqlCountNAggregateFunction extends AbstractCountAggregateFunction {
+
+    public SqlCountNAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context,
+            SourceLocation sourceLoc) throws HyracksDataException {
+        super(args, context, sourceLoc);
+    }
+
+    @Override
+    protected void processNull() {
+    }
+
+    @Override
+    protected void processValue(ATypeTag tag) {
+        if (ATypeHierarchy.getTypeDomain(tag) == ATypeHierarchy.Domain.NUMERIC) {
+            cnt++;
+        }
+    }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
index 4f67fba..5615e14 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
@@ -45,8 +45,8 @@
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-import com.esri.core.geometry.Envelope;
+import org.locationtech.jts.geom.Envelope;
+import org.locationtech.jts.geom.Geometry;
public class CreateMBREvalFactory implements IScalarEvaluatorFactory {
@@ -257,25 +257,25 @@
}
break;
case GEOMETRY:
- Envelope record = new Envelope();
- AGeometrySerializerDeserializer.getAGeometryObject(data0, startOffset0 + 1)
- .getGeometry().getEsriGeometry().queryEnvelope(record);
+ Geometry geometry = AGeometrySerializerDeserializer
+ .getAGeometryObject(data0, startOffset0 + 1).getGeometry();
+ Envelope envelope = geometry.getEnvelopeInternal();
switch (coordinate) {
case 0:
- value = record.getXMin();
+ value = envelope.getMinX();
break;
case 1:
- value = record.getYMin();
+ value = envelope.getMinY();
break;
case 2:
- value = record.getXMax();
+ value = envelope.getMaxX();
break;
case 3:
- value = record.getYMax();
+ value = envelope.getMaxY();
break;
default:
- throw new NotImplementedException(
- coordinate + "is not a valid coordinate option");
+ throw new IllegalArgumentException(
+ coordinate + " is not a valid coordinate option");
}
break;
case CIRCLE:
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/functions/FunctionCollection.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/functions/FunctionCollection.java
index 86a0790..d682d7a 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/functions/FunctionCollection.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/functions/FunctionCollection.java
@@ -37,6 +37,8 @@
import org.apache.asterix.runtime.aggregates.scalar.ScalarAvgDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarCountAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarCountDistinctAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.scalar.ScalarCountNAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.scalar.ScalarCountNDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarFirstElementAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarKurtosisDistinctAggregateDescriptor;
@@ -50,6 +52,8 @@
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlAvgDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlCountAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlCountDistinctAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlCountNAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlCountNDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlKurtosisDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.scalar.ScalarSqlMaxAggregateDescriptor;
@@ -83,6 +87,7 @@
import org.apache.asterix.runtime.aggregates.scalar.ScalarVarPopDistinctAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableCountAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.serializable.std.SerializableCountNAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableGlobalSkewnessAggregateDescriptor;
@@ -135,6 +140,7 @@
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlCountAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlCountNAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlSkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableSqlStddevAggregateDescriptor;
@@ -149,6 +155,7 @@
import org.apache.asterix.runtime.aggregates.serializable.std.SerializableVarPopAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.AvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.CountAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.std.CountNAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.GlobalMaxAggregateDescriptor;
@@ -226,6 +233,7 @@
import org.apache.asterix.runtime.aggregates.std.SkewnessAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlAvgAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlCountAggregateDescriptor;
+import org.apache.asterix.runtime.aggregates.std.SqlCountNAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlKurtosisAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlMaxAggregateDescriptor;
import org.apache.asterix.runtime.aggregates.std.SqlMedianAggregateDescriptor;
@@ -698,6 +706,7 @@
// aggregate functions
fc.add(ListifyAggregateDescriptor.FACTORY);
fc.add(CountAggregateDescriptor.FACTORY);
+ fc.add(CountNAggregateDescriptor.FACTORY);
fc.add(AvgAggregateDescriptor.FACTORY);
fc.add(LocalAvgAggregateDescriptor.FACTORY);
fc.add(IntermediateAvgAggregateDescriptor.FACTORY);
@@ -753,6 +762,7 @@
// serializable aggregates
fc.add(SerializableCountAggregateDescriptor.FACTORY);
+ fc.add(SerializableCountNAggregateDescriptor.FACTORY);
fc.add(SerializableAvgAggregateDescriptor.FACTORY);
fc.add(SerializableLocalAvgAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateAvgAggregateDescriptor.FACTORY);
@@ -790,7 +800,9 @@
fc.add(ScalarArrayAggAggregateDescriptor.FACTORY);
fc.add(ScalarArrayAggDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarCountAggregateDescriptor.FACTORY);
+ fc.add(ScalarCountNAggregateDescriptor.FACTORY);
fc.add(ScalarCountDistinctAggregateDescriptor.FACTORY);
+ fc.add(ScalarCountNDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarAvgAggregateDescriptor.FACTORY);
fc.add(ScalarAvgDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSumAggregateDescriptor.FACTORY);
@@ -816,6 +828,7 @@
// SQL aggregates
fc.add(SqlCountAggregateDescriptor.FACTORY);
+ fc.add(SqlCountNAggregateDescriptor.FACTORY);
fc.add(SqlAvgAggregateDescriptor.FACTORY);
fc.add(LocalSqlAvgAggregateDescriptor.FACTORY);
fc.add(IntermediateSqlAvgAggregateDescriptor.FACTORY);
@@ -867,6 +880,7 @@
// SQL serializable aggregates
fc.add(SerializableSqlCountAggregateDescriptor.FACTORY);
+ fc.add(SerializableSqlCountNAggregateDescriptor.FACTORY);
fc.add(SerializableSqlAvgAggregateDescriptor.FACTORY);
fc.add(SerializableLocalSqlAvgAggregateDescriptor.FACTORY);
fc.add(SerializableIntermediateSqlAvgAggregateDescriptor.FACTORY);
@@ -903,6 +917,8 @@
// SQL scalar aggregates
fc.add(ScalarSqlCountAggregateDescriptor.FACTORY);
fc.add(ScalarSqlCountDistinctAggregateDescriptor.FACTORY);
+ fc.add(ScalarSqlCountNDistinctAggregateDescriptor.FACTORY);
+ fc.add(ScalarSqlCountNAggregateDescriptor.FACTORY);
fc.add(ScalarSqlAvgAggregateDescriptor.FACTORY);
fc.add(ScalarSqlAvgDistinctAggregateDescriptor.FACTORY);
fc.add(ScalarSqlSumAggregateDescriptor.FACTORY);
diff --git a/asterixdb/asterix-server/pom.xml b/asterixdb/asterix-server/pom.xml
index e65c742..82534fb 100644
--- a/asterixdb/asterix-server/pom.xml
+++ b/asterixdb/asterix-server/pom.xml
@@ -612,6 +612,22 @@
<contentFile>protobuf_2.5.0_LICENSE.txt</contentFile>
</license>
<license>
+ <displayName>Eclipse Distribution License - v 1.0</displayName>
+ <url>https://www.eclipse.org/org/documents/edl-v10.php</url>
+ <aliasUrls>
+ <aliasUrl>https://github.com/locationtech/jts/blob/master/LICENSE_EDLv1.txt</aliasUrl>
+ </aliasUrls>
+ <metric>1050</metric>
+ </license>
+ <license>
+ <displayName>Eclipse Public License, Version 2.0</displayName>
+ <url>https://www.eclipse.org/legal/epl-2.0/</url>
+ <aliasUrls>
+ <aliasUrl>https://github.com/locationtech/jts/blob/master/LICENSE_EPLv2.txt</aliasUrl>
+ </aliasUrls>
+ <metric>1100</metric>
+ </license>
+ <license>
<displayName>a BSD 3-clause license</displayName>
<url>https://github.com/codehaus/paranamer-git/blob/paranamer-2.3/LICENSE.txt</url>
<contentFile>paranamer-2.3_LICENSE.txt</contentFile>
diff --git a/asterixdb/pom.xml b/asterixdb/pom.xml
index cb01868..94bd6e1 100644
--- a/asterixdb/pom.xml
+++ b/asterixdb/pom.xml
@@ -752,14 +752,6 @@
</profile>
<profile>
<id>azurite-tests</id>
- <activation>
- <os>
- <family>unix</family>
- </os>
- <property>
- <name>!skipTests</name>
- </property>
- </activation>
<properties>
<azurite.npm.install.stage>process-classes</azurite.npm.install.stage>
<azurite.install.stage>generate-test-resources</azurite.install.stage>
@@ -1554,9 +1546,9 @@
<version>3.8.4</version>
</dependency>
<dependency>
- <groupId>com.esri.geometry</groupId>
- <artifactId>esri-geometry-api</artifactId>
- <version>2.0.0</version>
+ <groupId>org.locationtech.jts</groupId>
+ <artifactId>jts-core</artifactId>
+ <version>1.19.0</version>
</dependency>
<dependency>
<groupId>org.reflections</groupId>
diff --git a/asterixdb/src/main/licenses/content/com.microsoft.azure--msal4j--1.16.1_MIT_License.txt b/asterixdb/src/main/licenses/content/com.microsoft.azure--msal4j--1.16.1_MIT_License.txt
new file mode 100644
index 0000000..d1ca00f
--- /dev/null
+++ b/asterixdb/src/main/licenses/content/com.microsoft.azure--msal4j--1.16.1_MIT_License.txt
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) Microsoft Corporation. All rights reserved.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
\ No newline at end of file
diff --git a/asterixdb/src/main/licenses/content/com.microsoft.azure--msal4j-persistence-extension--1.3.0_MIT_License.txt b/asterixdb/src/main/licenses/content/com.microsoft.azure--msal4j-persistence-extension--1.3.0_MIT_License.txt
new file mode 100644
index 0000000..d1ca00f
--- /dev/null
+++ b/asterixdb/src/main/licenses/content/com.microsoft.azure--msal4j-persistence-extension--1.3.0_MIT_License.txt
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) Microsoft Corporation. All rights reserved.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
\ No newline at end of file
diff --git a/asterixdb/src/main/licenses/content/github.com_locationtech_jts_blob_master_LICENSE_EDLv1.txt b/asterixdb/src/main/licenses/content/github.com_locationtech_jts_blob_master_LICENSE_EDLv1.txt
new file mode 100644
index 0000000..1071fed
--- /dev/null
+++ b/asterixdb/src/main/licenses/content/github.com_locationtech_jts_blob_master_LICENSE_EDLv1.txt
@@ -0,0 +1,30 @@
+Eclipse Distribution License - v 1.0
+
+Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors.
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+ Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+ Neither the name of the Eclipse Foundation, Inc. nor the names of its
+ contributors may be used to endorse or promote products derived from this
+ software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/asterixdb/src/main/licenses/content/opensource.org_licenses_BSD-2-Clause.txt b/asterixdb/src/main/licenses/content/opensource.org_licenses_BSD-2-Clause.txt
new file mode 100644
index 0000000..226190a
--- /dev/null
+++ b/asterixdb/src/main/licenses/content/opensource.org_licenses_BSD-2-Clause.txt
@@ -0,0 +1,24 @@
+BSD 2-Clause License
+
+Copyright (c) 2024, Couchbase, Inc.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/asterixdb/src/main/licenses/templates/source_licenses.ftl b/asterixdb/src/main/licenses/templates/source_licenses.ftl
index 97b032b..95b6f3a 100644
--- a/asterixdb/src/main/licenses/templates/source_licenses.ftl
+++ b/asterixdb/src/main/licenses/templates/source_licenses.ftl
@@ -133,4 +133,196 @@
</#if>
<#if !asterixDashboardSkip!false>
<#include "../../../../asterix-dashboard/src/main/licenses/dashboard-source-license.ftl">
-</#if>
\ No newline at end of file
+</#if>
+<@license component="AsterixDB OM" files=[
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/JtsModule.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiLineStringParser.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PointParser.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPointParser.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryParser.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/MultiPolygonParser.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/LineStringParser.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/BaseParser.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GeometryCollectionParser.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/GenericGeometryParser.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/parsers/PolygonParser.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoFunctionUtils.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/GeoJsonConstants.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometryDeserializer.java",
+ "org/apache/asterix/dataflow/data/nontagged/serde/jacksonjts/serde/GeometrySerializer.java"
+ ]>
+   Classes are modified to support the org.locationtech.jts package instead of com.vividsolutions.jts.
+ Copyright BeDataDriven
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+ 1. Definitions.
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+ END OF TERMS AND CONDITIONS
+ APPENDIX: How to apply the Apache License to your work.
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+ Copyright [yyyy] [name of copyright owner]
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ For more details on the original project and licensing, please visit
+ https://github.com/bedatadriven/jackson-datatype-jts.
+</@license>
\ No newline at end of file
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/config/AlgebricksConfig.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/config/AlgebricksConfig.java
index 98c4223..465cd29 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/config/AlgebricksConfig.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/config/AlgebricksConfig.java
@@ -46,4 +46,5 @@
StorageUtil.getIntSizeInBytes(8, StorageUtil.StorageUnit.KILOBYTE);
public static final boolean BATCH_LOOKUP_DEFAULT = true;
public static final boolean COLUMN_FILTER_DEFAULT = true;
+ public static final int MAX_VARIABLE_OCCURRENCES_INLINING_DEFAULT = 128;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
index 11171a1..07e8bfc 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
@@ -62,6 +62,7 @@
private static final String MIN_JOIN_FRAMES = "MIN_JOIN_FRAMES";
private static final String MIN_GROUP_FRAMES = "MIN_GROUP_FRAMES";
private static final String MIN_WINDOW_FRAMES = "MIN_WINDOW_FRAMES";
+ private static final String MAX_VARIABLE_OCCURRENCES_INLINING = "MAX_VARIABLE_OCCURRENCES_INLINING";
private final Properties properties = new Properties();
@@ -383,6 +384,14 @@
return properties.getProperty(property);
}
+ public int getMaxVariableOccurrencesForInlining() {
+ return getInt(MAX_VARIABLE_OCCURRENCES_INLINING, AlgebricksConfig.MAX_VARIABLE_OCCURRENCES_INLINING_DEFAULT);
+ }
+
+ public void setMaxVariableOccurrencesForInlining(int maxVariableOccurrencesForInlining) {
+ setInt(MAX_VARIABLE_OCCURRENCES_INLINING, maxVariableOccurrencesForInlining);
+ }
+
private void setInt(String property, int value) {
properties.setProperty(property, Integer.toString(value));
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java
index 6ad50e6..5970f4f 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java
@@ -19,6 +19,7 @@
package org.apache.hyracks.algebricks.rewriter.rules;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -84,6 +85,7 @@
private final List<LogicalVariable> usedVars = new ArrayList<>();
// map of variables and the counts of how many times they were used
private final Map<LogicalVariable, MutableInt> usedVariableCounter = new HashMap<>();
+ private final Map<LogicalVariable, Map<LogicalVariable, Integer>> totalLeafVariableCounter = new HashMap<>();
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
@@ -113,6 +115,7 @@
inlineVisitor.setContext(context);
subTreesDone.clear();
usedVariableCounter.clear();
+ totalLeafVariableCounter.clear();
}
protected boolean performBottomUpAction(ILogicalOperator op) throws AlgebricksException {
@@ -168,6 +171,20 @@
}
}
+ if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+ AssignOperator assignOp = (AssignOperator) op;
+ computeLeafVariablesCount(assignOp);
+ List<LogicalVariable> vars = assignOp.getVariables();
+ for (LogicalVariable variable : vars) {
+ // Don't inline variables that potentially reference a large number of the same leaf variable.
+ Map<LogicalVariable, Integer> varMap = totalLeafVariableCounter.get(variable);
+ if (varMap != null && !varMap.isEmpty() && Collections.max(varMap.values()) > context
+ .getPhysicalOptimizationConfig().getMaxVariableOccurrencesForInlining()) {
+ varAssignRhs.remove(variable);
+ }
+ }
+ }
+
// Descend into subplan
if (op.getOperatorTag() == LogicalOperatorTag.SUBPLAN || op.getOperatorTag() == LogicalOperatorTag.WINDOW) {
List<ILogicalPlan> nestedPlans = ((AbstractOperatorWithNestedPlans) op).getNestedPlans();
@@ -259,6 +276,28 @@
}
}
+ private void computeLeafVariablesCount(AssignOperator assignOp) {
+ List<LogicalVariable> vars = assignOp.getVariables();
+ List<Mutable<ILogicalExpression>> exprs = assignOp.getExpressions();
+ for (int i = 0; i < vars.size(); i++) {
+ LogicalVariable variable = vars.get(i);
+ ILogicalExpression expr = exprs.get(i).getValue();
+ usedVars.clear();
+ expr.getUsedVariables(usedVars);
+ Map<LogicalVariable, Integer> varMap =
+ totalLeafVariableCounter.computeIfAbsent(variable, k -> new HashMap<>());
+ for (LogicalVariable usedVar : usedVars) {
+ if (totalLeafVariableCounter.containsKey(usedVar)) {
+ for (Map.Entry<LogicalVariable, Integer> entry : totalLeafVariableCounter.get(usedVar).entrySet()) {
+ varMap.put(entry.getKey(), entry.getValue() + varMap.getOrDefault(entry.getKey(), 0));
+ }
+ } else {
+ varMap.put(usedVar, 1);
+ }
+ }
+ }
+ }
+
public static class InlineVariablesVisitor extends LogicalExpressionReferenceTransformVisitor
implements ILogicalExpressionReferenceTransform {
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
index 59a4da4..02327e2 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
@@ -158,6 +158,8 @@
UNSUPPORTED_WRITE_SPEC(128),
JOB_REJECTED(129),
FRAME_BIGGER_THAN_SORT_MEMORY(130),
+ RESULT_DOES_NOT_FOLLOW_SCHEMA(131),
+ EXTRA_FIELD_IN_RESULT_NOT_FOUND_IN_SCHEMA(132),
// Compilation error codes.
RULECOLLECTION_NOT_INSTANCE_OF_LIST(10000),
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java
index 5045c86..4022961 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java
@@ -332,6 +332,10 @@
public static String toString(byte[] bytes, int start) {
StringBuilder builder = new StringBuilder();
+ return toString(bytes, start, builder);
+ }
+
+ public static String toString(byte[] bytes, int start, StringBuilder builder) {
return toString(builder, bytes, start).toString();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties b/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
index e94c12e..226234f 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
@@ -148,6 +148,8 @@
128 = Unsupported copy to specification: PARTITION BY %1$s, ORDER BY %2$s
129 = Job %1$s failed to run. Cluster is not accepting jobs.
130 = Frame data=%1$s (requiring %2$s) is bigger than the sort budget. Used=%3$s, max=%4$s. Please increase the sort memory budget.
+131 = Result does not follow the schema, %1$s type expected but found %2$s type at '%3$s'
+132 = Extra field in the result, field '%1$s' does not exist at '%2$s' in the schema
10000 = The given rule collection %1$s is not an instance of the List class.
10001 = Cannot compose partition constraint %1$s with %2$s
diff --git a/hyracks-fullstack/pom.xml b/hyracks-fullstack/pom.xml
index 51bdd2f..20c3e21 100644
--- a/hyracks-fullstack/pom.xml
+++ b/hyracks-fullstack/pom.xml
@@ -75,7 +75,7 @@
<snappy.version>1.1.10.5</snappy.version>
<jackson.version>2.14.3</jackson.version>
<jackson-databind.version>${jackson.version}</jackson-databind.version>
- <netty.version>4.1.101.Final</netty.version>
+ <netty.version>4.1.112.Final</netty.version>
<asm.version>9.3</asm.version>
<implementation.title>Apache Hyracks and Algebricks - ${project.name}</implementation.title>