Merge branch 'gerrit/ionic' into 'master'

Change-Id: I1b351980955c28beb7e8d6d6eb99075092c88916
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceRequestParameters.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceRequestParameters.java
index e8cb86d..de5ac26 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceRequestParameters.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceRequestParameters.java
@@ -19,8 +19,6 @@
 
 package org.apache.asterix.api.http.server;
 
-import static org.apache.asterix.utils.RedactionUtil.REDACTED_SENSITIVE_ENTRY_VALUE;
-
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -442,7 +440,12 @@
         object.put("source", source);
         if (statementParams != null) {
             for (Map.Entry<String, JsonNode> statementParam : statementParams.entrySet()) {
-                object.set('$' + statementParam.getKey(), REDACTED_SENSITIVE_ENTRY_VALUE);
+                try {
+                    String s = OBJECT_MAPPER.writeValueAsString(statementParam.getValue());
+                    object.put('$' + statementParam.getKey(), LogRedactionUtil.userData(s));
+                } catch (JsonProcessingException e) {
+                    // ignore: a parameter value that cannot be serialized is simply omitted from the log
+                }
             }
         }
         return object;
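
Note on the change above: instead of replacing every statement parameter with a fixed redaction constant, the value is now serialized and wrapped in user-data redaction markers, so a redaction-aware log layout can mask it while plain deployments keep the value. A minimal, self-contained sketch of that flow using Jackson directly; redactUserData is a hypothetical stand-in for org.apache.hyracks.util.LogRedactionUtil.userData, whose exact marker format is an implementation detail of hyracks-util:

import java.util.LinkedHashMap;
import java.util.Map;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class StatementParamLoggingSketch {
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    // Hypothetical stand-in for LogRedactionUtil.userData: tags user-supplied
    // text so a redaction-aware log layout can strip or mask it later.
    private static String redactUserData(String s) {
        return "@{" + s + "}@";
    }

    public static void main(String[] args) throws Exception {
        Map<String, JsonNode> statementParams = new LinkedHashMap<>();
        statementParams.put("name", OBJECT_MAPPER.valueToTree("alice"));
        statementParams.put("limit", OBJECT_MAPPER.valueToTree(10));

        ObjectNode object = OBJECT_MAPPER.createObjectNode();
        for (Map.Entry<String, JsonNode> statementParam : statementParams.entrySet()) {
            // Serialize the JSON value, then mark it as user data before logging it.
            String s = OBJECT_MAPPER.writeValueAsString(statementParam.getValue());
            object.put('$' + statementParam.getKey(), redactUserData(s));
        }
        System.out.println(object); // {"$name":"@{\"alice\"}@","$limit":"@{10}@"}
    }
}
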
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/lpad/lpad.00.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/lpad/lpad.00.ddl.sqlpp
index 897aac5..5352c31 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/lpad/lpad.00.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/lpad/lpad.00.ddl.sqlpp
@@ -25,6 +25,4 @@
 {"id":3, "f": "a𩸽b", "f1": "🎉"},
 {"id":4, "f": "👩‍👩‍👧‍👦", "f1": "✓✓✓"},
 {"id":5, "f": null, "f1" : null}
-]);
-
-
+]);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/lpad/lpad.02.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/lpad/lpad.02.query.sqlpp
index a1383e6..8bd0175 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/lpad/lpad.02.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/lpad/lpad.02.query.sqlpp
@@ -23,5 +23,8 @@
     lpad('👩‍👩‍👧‍', 10, '👩‍👩‍👧‍👦') AS padded_to_10_codepoints,
     lpad(null, 4, '%%') AS padded_to_null_is_null,
     lpad(22, 11, 'y') AS padded_non_str,
-    lpad('abcd', -2, 'y') AS negative_length;
-
+    lpad('abcd', -2, 'y') AS negative_length,
+    lpad('abcd', 56, '') AS padding_with_empty_string,
+    lpad('abcd', 10, ' ') AS padded_with_space,
+    lpad('', 3, 'a') AS empty_string_padded,
+    lpad('', 3, '') AS empty_string_padded_with_empty_string;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/rpad/rpad.00.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/rpad/rpad.00.ddl.sqlpp
index 897aac5..5352c31 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/rpad/rpad.00.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/rpad/rpad.00.ddl.sqlpp
@@ -25,6 +25,4 @@
 {"id":3, "f": "a𩸽b", "f1": "🎉"},
 {"id":4, "f": "👩‍👩‍👧‍👦", "f1": "✓✓✓"},
 {"id":5, "f": null, "f1" : null}
-]);
-
-
+]);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/rpad/rpad.02.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/rpad/rpad.02.query.sqlpp
index c0d1316..9f37767 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/rpad/rpad.02.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/string/pad/rpad/rpad.02.query.sqlpp
@@ -22,5 +22,9 @@
     rpad('👩‍👩‍👧‍', 2, '$') AS truncated_to_2_codepoints,
     rpad('👩‍👩‍👧‍', 10, '👩‍👩‍👧‍👦') AS padded_to_10_codepoints,
     rpad(null, 4, '%%') AS padded_to_null_is_null,
-    rpad(22, 11, 'y') AS padded_non_str;
-
+    rpad(22, 11, 'y') AS padded_non_str,
+    rpad('abcd', -2, 'y') AS negative_length,
+    rpad('abcd', 56, '') AS padding_with_empty_string,
+    rpad('abcd', 10, ' ') AS padded_with_space,
+    rpad('', 3, 'a') AS empty_string_padded,
+    rpad('', 3, '') AS empty_string_padded_with_empty_string;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/string/pad/lpad/lpad.02.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/string/pad/lpad/lpad.02.adm
index 3f6d3e4..7731480 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/string/pad/lpad/lpad.02.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/string/pad/lpad/lpad.02.adm
@@ -1 +1 @@
-{ "padded_to_11_codepoints": "$&^$asterix", "truncated_to_2_codepoints": "👩‍", "padded_to_10_codepoints": "👩‍👩‍👩‍👩‍👧‍", "padded_to_null_is_null": null, "padded_non_str": null, "negative_length": null }
\ No newline at end of file
+{ "padded_to_11_codepoints": "$&^$asterix", "truncated_to_2_codepoints": "👩‍", "padded_to_10_codepoints": "👩‍👩‍👩‍👩‍👧‍", "padded_to_null_is_null": null, "padded_non_str": null, "negative_length": null, "padding_with_empty_string": "abcd", "padded_with_space": "      abcd", "empty_string_padded": "aaa", "empty_string_padded_with_empty_string": "" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/string/pad/rpad/rpad.02.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/string/pad/rpad/rpad.02.adm
index ba875f6..efac7d7 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/string/pad/rpad/rpad.02.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/string/pad/rpad/rpad.02.adm
@@ -1 +1 @@
-{ "padded_to_11_codepoints": "asterix$&^$", "truncated_to_2_codepoints": "👩‍", "padded_to_10_codepoints": "👩‍👩‍👧‍👩‍👩‍", "padded_to_null_is_null": null, "padded_non_str": null }
\ No newline at end of file
+{ "padded_to_11_codepoints": "asterix$&^$", "truncated_to_2_codepoints": "👩‍", "padded_to_10_codepoints": "👩‍👩‍👧‍👩‍👩‍", "padded_to_null_is_null": null, "padded_non_str": null, "negative_length": null, "padding_with_empty_string": "abcd", "padded_with_space": "abcd      ", "empty_string_padded": "aaa", "empty_string_padded_with_empty_string": "" }
\ No newline at end of file
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3InputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3InputStream.java
index a9f1234..60205ec 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3InputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3InputStream.java
@@ -33,6 +33,7 @@
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.external.input.filter.embedder.IExternalFilterValueEmbedder;
 import org.apache.asterix.external.input.record.reader.abstracts.AbstractExternalInputStream;
+import org.apache.asterix.external.input.record.reader.stream.AvailableInputStream;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.aws.s3.S3AuthUtils;
 import org.apache.commons.lang3.StringUtils;
@@ -74,7 +75,7 @@
         }
         // Use gzip stream if needed
         if (StringUtils.endsWithIgnoreCase(fileName, ".gz") || StringUtils.endsWithIgnoreCase(fileName, ".gzip")) {
-            in = new GZIPInputStream(in, ExternalDataConstants.DEFAULT_BUFFER_SIZE);
+            in = new GZIPInputStream(new AvailableInputStream(in), ExternalDataConstants.DEFAULT_BUFFER_SIZE);
         }
         return true;
     }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/blob/AzureBlobInputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/blob/AzureBlobInputStream.java
index db4fdb0..b952495 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/blob/AzureBlobInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/blob/AzureBlobInputStream.java
@@ -32,6 +32,7 @@
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.external.input.filter.embedder.IExternalFilterValueEmbedder;
 import org.apache.asterix.external.input.record.reader.abstracts.AbstractExternalInputStream;
+import org.apache.asterix.external.input.record.reader.stream.AvailableInputStream;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.util.LogRedactionUtil;
@@ -67,7 +68,7 @@
             // Use gzip stream if needed
             String lowerCaseFileName = fileName.toLowerCase();
             if (lowerCaseFileName.endsWith(".gz") || lowerCaseFileName.endsWith(".gzip")) {
-                in = new GZIPInputStream(in, ExternalDataConstants.DEFAULT_BUFFER_SIZE);
+                in = new GZIPInputStream(new AvailableInputStream(in), ExternalDataConstants.DEFAULT_BUFFER_SIZE);
             }
         } catch (BlobStorageException ex) {
             if (ex.getErrorCode().equals(BlobErrorCode.BLOB_NOT_FOUND)) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/datalake/AzureDataLakeInputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/datalake/AzureDataLakeInputStream.java
index 9b16026..b13213d 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/datalake/AzureDataLakeInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/datalake/AzureDataLakeInputStream.java
@@ -32,6 +32,7 @@
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.external.input.filter.embedder.IExternalFilterValueEmbedder;
 import org.apache.asterix.external.input.record.reader.abstracts.AbstractExternalInputStream;
+import org.apache.asterix.external.input.record.reader.stream.AvailableInputStream;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.util.LogRedactionUtil;
@@ -67,7 +68,7 @@
             // Use gzip stream if needed
             String lowerCaseFileName = fileName.toLowerCase();
             if (lowerCaseFileName.endsWith(".gz") || lowerCaseFileName.endsWith(".gzip")) {
-                in = new GZIPInputStream(in, ExternalDataConstants.DEFAULT_BUFFER_SIZE);
+                in = new GZIPInputStream(new AvailableInputStream(in), ExternalDataConstants.DEFAULT_BUFFER_SIZE);
             }
         } catch (BlobStorageException ex) {
             if (ex.getErrorCode().equals(BlobErrorCode.BLOB_NOT_FOUND)) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/gcs/GCSInputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/gcs/GCSInputStream.java
index 1ef3fcd..307028a 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/gcs/GCSInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/gcs/GCSInputStream.java
@@ -33,6 +33,7 @@
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.external.input.filter.embedder.IExternalFilterValueEmbedder;
 import org.apache.asterix.external.input.record.reader.abstracts.AbstractExternalInputStream;
+import org.apache.asterix.external.input.record.reader.stream.AvailableInputStream;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.google.gcs.GCSAuthUtils;
 import org.apache.commons.lang3.StringUtils;
@@ -72,7 +73,7 @@
 
         // Use gzip stream if needed
         if (StringUtils.endsWithIgnoreCase(fileName, ".gz") || StringUtils.endsWithIgnoreCase(fileName, ".gzip")) {
-            in = new GZIPInputStream(in, ExternalDataConstants.DEFAULT_BUFFER_SIZE);
+            in = new GZIPInputStream(new AvailableInputStream(in), ExternalDataConstants.DEFAULT_BUFFER_SIZE);
         }
         return true;
     }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AvailableInputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AvailableInputStream.java
new file mode 100644
index 0000000..22f8df4
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AvailableInputStream.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.stream;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+public class AvailableInputStream extends InputStream {
+    private final InputStream is;
+
+    public AvailableInputStream(InputStream inputstream) {
+        is = inputstream;
+    }
+
+    @Override
+    public int read() throws IOException {
+        return is.read();
+    }
+
+    @Override
+    public int read(byte[] b) throws IOException {
+        return is.read(b);
+    }
+
+    @Override
+    public int read(byte[] b, int off, int len) throws IOException {
+        return is.read(b, off, len);
+    }
+
+    @Override
+    public void close() throws IOException {
+        is.close();
+    }
+
+    @Override
+    public int available() throws IOException {
+        // Report at least one available byte, even when the underlying
+        // stream reports none: GZIPInputStream consults available() to
+        // decide whether a concatenated gzip member may follow, and a
+        // premature 0 from a remote stream would truncate the data.
+        int a = is.available();
+        return a == 0 ? 1 : a;
+    }
+}
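
Why this wrapper matters: OpenJDK's GZIPInputStream checks available() after finishing each gzip member to decide whether a concatenated member may follow, so a remote stream that reports 0 while bytes are still in flight (as the cloud SDK streams can) silently truncates multi-member .gz objects. A self-contained sketch of the failure and the fix, assuming AvailableInputStream from the hunk above is on the classpath; ZeroAvailableStream is a hypothetical stand-in for such a remote stream, and the 1-byte inflater buffer just makes the truncation deterministic:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

public class AvailableDemo {
    // Hypothetical stand-in for a remote stream that reports 0 available bytes.
    static class ZeroAvailableStream extends FilterInputStream {
        ZeroAvailableStream(InputStream in) {
            super(in);
        }

        @Override
        public int available() {
            return 0;
        }
    }

    static byte[] gzip(String s) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (GZIPOutputStream gz = new GZIPOutputStream(bos)) {
            gz.write(s.getBytes("UTF-8"));
        }
        return bos.toByteArray();
    }

    static String gunzipAll(InputStream raw) throws IOException {
        // 1-byte buffer so the second member is not already buffered
        // when the first member's trailer is checked.
        try (GZIPInputStream in = new GZIPInputStream(raw, 1)) {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            byte[] buf = new byte[512];
            for (int n; (n = in.read(buf)) != -1;) {
                bos.write(buf, 0, n);
            }
            return bos.toString("UTF-8");
        }
    }

    public static void main(String[] args) throws IOException {
        // Two concatenated gzip members; a correct reader returns "hello world".
        ByteArrayOutputStream concat = new ByteArrayOutputStream();
        concat.write(gzip("hello "));
        concat.write(gzip("world"));
        byte[] data = concat.toByteArray();

        // Truncated: GZIPInputStream stops after the first member when available() == 0.
        System.out.println(gunzipAll(new ZeroAvailableStream(new ByteArrayInputStream(data))));
        // Complete: the wrapper keeps GZIPInputStream reading both members.
        System.out.println(gunzipAll(new AvailableInputStream(new ZeroAvailableStream(new ByteArrayInputStream(data)))));
    }
}
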
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
index 63eda21..71f479f 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
@@ -800,7 +800,7 @@
     @Override
     public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriteDatabaseWithKeyRuntime(int sourceColumn,
             IScalarEvaluatorFactory[] keyEvaluatorFactories, IWriteDataSink sink, RecordDescriptor inputDesc,
-            Object sourceType) throws AlgebricksException {
+            Object sourceType, IWarningCollector warningCollector) throws AlgebricksException {
         throw new UnsupportedOperationException();
     }
 
diff --git a/asterixdb/pom.xml b/asterixdb/pom.xml
index 1216f8a..ae80656 100644
--- a/asterixdb/pom.xml
+++ b/asterixdb/pom.xml
@@ -94,8 +94,14 @@
     <hadoop.version>3.4.1</hadoop.version>
     <jacoco.version>0.7.6.201602180812</jacoco.version>
     <log4j.version>2.22.1</log4j.version>
+    <!-- IMPORTANT: please keep the aws-crt version in sync with that defined in the AWS SDK BOM -->
+    <!-- you can get this by inspecting the aws-sdk-java-pom for the SDK version. e.g.
+     $ curl -s https://repo1.maven.org/maven2/software/amazon/awssdk/aws-sdk-java-pom/2.31.57/aws-sdk-java-pom-2.31.57.pom | grep awscrt.version
+        <awscrt.version>0.38.1</awscrt.version>
+      -->
     <awsjavasdk.version>2.29.27</awsjavasdk.version>
-    <awsjavasdk.crt.version>0.29.10</awsjavasdk.crt.version>
+    <awsjavasdk.crt.version>0.33.3</awsjavasdk.crt.version>
+
     <parquet.version>1.14.3</parquet.version>
     <hadoop-awsjavasdk.version>1.12.779</hadoop-awsjavasdk.version>
     <azureblobjavasdk.version>12.25.1</azureblobjavasdk.version>
@@ -1562,74 +1568,6 @@
         <artifactId>reflections</artifactId>
         <version>0.9.12</version>
       </dependency>
-      <dependency>
-        <groupId>software.amazon.awssdk</groupId>
-        <artifactId>s3</artifactId>
-        <version>${awsjavasdk.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty-transport-classes-epoll</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-      <dependency>
-        <groupId>software.amazon.awssdk</groupId>
-        <artifactId>regions</artifactId>
-        <version>${awsjavasdk.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-      <dependency>
-        <groupId>software.amazon.awssdk</groupId>
-        <artifactId>auth</artifactId>
-        <version>${awsjavasdk.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>software.amazon.awssdk</groupId>
-        <artifactId>aws-core</artifactId>
-        <version>${awsjavasdk.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>software.amazon.awssdk</groupId>
-        <artifactId>sdk-core</artifactId>
-        <version>${awsjavasdk.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-      <dependency>
-        <groupId>software.amazon.awssdk</groupId>
-        <artifactId>apache-client</artifactId>
-        <version>${awsjavasdk.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>software.amazon.awssdk</groupId>
-        <artifactId>http-client-spi</artifactId>
-        <version>${awsjavasdk.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>software.amazon.awssdk</groupId>
-        <artifactId>s3-transfer-manager</artifactId>
-        <version>${awsjavasdk.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>software.amazon.awssdk</groupId>
-        <artifactId>sts</artifactId>
-        <version>${awsjavasdk.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>software.amazon.awssdk.crt</groupId>
-        <artifactId>aws-crt</artifactId>
-        <version>${awsjavasdk.crt.version}</version>
-      </dependency>
       <!-- Mock for Adobe AWS S3 -->
       <dependency>
         <groupId>com.adobe.testing</groupId>
@@ -1987,6 +1925,23 @@
         <artifactId>protobuf-java-util</artifactId>
         <version>${protobuf-java.version}</version>
       </dependency>
+      <!-- IMPORTANT: please keep the aws-crt version in sync with that defined in the AWS SDK BOM -->
+      <!-- you can get this by inspecting the aws-sdk-java-pom for the SDK version:
+       $ curl -s https://repo1.maven.org/maven2/software/amazon/awssdk/aws-sdk-java-pom/2.31.57/aws-sdk-java-pom-2.31.57.pom | grep awscrt.version
+        <awscrt.version>0.38.1</awscrt.version>
+        -->
+      <dependency>
+        <groupId>software.amazon.awssdk</groupId>
+        <artifactId>bom</artifactId>
+        <version>${awsjavasdk.version}</version>
+        <type>pom</type>
+        <scope>import</scope>
+      </dependency>
+      <dependency>
+        <groupId>software.amazon.awssdk.crt</groupId>
+        <artifactId>aws-crt</artifactId>
+        <version>${awsjavasdk.crt.version}</version>
+      </dependency>
     </dependencies>
   </dependencyManagement>
 
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java
index caac0ae..20fc9c5 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java
@@ -41,6 +41,7 @@
 import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.IWarningCollector;
 import org.apache.hyracks.api.exceptions.SourceLocation;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.api.result.IResultMetadata;
@@ -68,7 +69,7 @@
 
     Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriteDatabaseWithKeyRuntime(int sourceColumn,
             IScalarEvaluatorFactory[] keyEvaluatorFactories, IWriteDataSink sink, RecordDescriptor inputDesc,
-            Object sourceType) throws AlgebricksException;
+            Object sourceType, IWarningCollector warningCollector) throws AlgebricksException;
 
     Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getResultHandleRuntime(IDataSink sink, int[] printColumns,
             IPrinterFactory[] printerFactories, IAWriterFactory writerFactory,
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java
index 0428cd1..addb6a4 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java
@@ -181,7 +181,7 @@
 
         } else {
             runtimeAndConstraints = mp.getWriteDatabaseWithKeyRuntime(sourceColumn, keyEvalFactories, writeDataSink,
-                    inputDesc, typeEnv.getVarType(sourceVariable));
+                    inputDesc, typeEnv.getVarType(sourceVariable), context.getWarningCollector());
         }
 
         IPushRuntimeFactory runtime = runtimeAndConstraints.first;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java
index 7c7111f..9ecd165 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java
@@ -393,7 +393,7 @@
         run.setStartTime(System.currentTimeMillis());
         run.setStartTimeZoneId(ZoneId.systemDefault().getId());
         JobId jobId = run.getJobId();
-        logJobCapacity(run, "running", Level.DEBUG);
+        logJobCapacity(run, "running", Level.INFO);
         activeRunMap.put(jobId, run);
         run.setStatus(JobStatus.RUNNING, null);
         executeJobInternal(run);
@@ -437,7 +437,7 @@
     private void releaseJobCapacity(JobRun jobRun) {
         final JobSpecification job = jobRun.getJobSpecification();
         jobCapacityController.release(job);
-        logJobCapacity(jobRun, "released", Level.DEBUG);
+        logJobCapacity(jobRun, "released", Level.INFO);
     }
 
     private void logJobCapacity(JobRun jobRun, String jobStateDesc, Level lvl) {
@@ -451,7 +451,8 @@
             return;
         }
         IReadOnlyClusterCapacity clusterCapacity = jobCapacityController.getClusterCapacity();
-        LOGGER.log(lvl, "{} {}, memory={}, cpu={}, (new) cluster memory={}, cpu={}, currently running={}, queued={}",
+        LOGGER.log(lvl,
+                "{} {}, job memory={}, cpu={}, (new) cluster memory={}, cpu={}, currently running={}, queued={}",
                 jobStateDesc, jobRun.getJobId(), requiredMemory, requiredCPUs,
                 clusterCapacity.getAggregatedMemoryByteSize(), clusterCapacity.getAggregatedCores(),
                 getRunningJobsCount(), jobQueue.size());
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/UTF8StringPointable.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/UTF8StringPointable.java
index 81afdf6..3744350 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/UTF8StringPointable.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/UTF8StringPointable.java
@@ -483,13 +483,15 @@
             throws IOException {
         int utfLen = src.getUTF8Length();
         int byteIdx = 0;
-        while (numCodePoints > 0) {
-            if (byteIdx == utfLen) {
-                byteIdx = 0;
+        if (utfLen > 0) {
+            while (numCodePoints > 0) {
+                if (byteIdx == utfLen) {
+                    byteIdx = 0;
+                }
+                builder.appendCodePoint(src.codePointAt(src.getMetaDataLength() + byteIdx));
+                numCodePoints--;
+                byteIdx += src.codePointSize(src.getMetaDataLength() + byteIdx);
             }
-            builder.appendCodePoint(src.codePointAt(src.getMetaDataLength() + byteIdx));
-            numCodePoints--;
-            byteIdx += src.codePointSize(src.getMetaDataLength() + byteIdx);
         }
     }
 
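
The new utfLen > 0 guard is what makes padding with an empty pad string terminate: with an empty source the loop could never consume a code point, so lpad('abcd', 56, '') now returns the input unchanged, as the lpad/rpad tests above expect. A plain-String analogue of the patched helper, assuming the same wrap-around-the-pad-string semantics:

public class RepeatCodePointsSketch {
    // Mirrors the patched UTF8StringPointable helper: append numCodePoints
    // code points, cycling through src; an empty src appends nothing.
    static String repeatCodePoints(String src, int numCodePoints) {
        StringBuilder builder = new StringBuilder();
        if (!src.isEmpty()) { // the guard added by this patch
            int charIdx = 0;
            while (numCodePoints > 0) {
                if (charIdx == src.length()) {
                    charIdx = 0; // wrap around to reuse the pad string
                }
                int cp = src.codePointAt(charIdx);
                builder.appendCodePoint(cp);
                numCodePoints--;
                charIdx += Character.charCount(cp);
            }
        }
        return builder.toString();
    }

    public static void main(String[] args) {
        System.out.println(repeatCodePoints("ab", 5)); // "ababa"
        System.out.println(repeatCodePoints("", 3));   // "" -- no endless loop
    }
}
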
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/VariableDeletableTupleMemoryManager.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/VariableDeletableTupleMemoryManager.java
index 3f5de62..40ff2e4 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/VariableDeletableTupleMemoryManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/VariableDeletableTupleMemoryManager.java
@@ -161,9 +161,7 @@
         policy.close();
         frames.clear();
         numTuples = 0;
-        if (LOG.isTraceEnabled()) {
-            LOG.trace("VariableTupleMemoryManager has reorganized {} times", statsReOrg);
-        }
+        LOG.trace("VariableTupleMemoryManager has reorganized {} times", statsReOrg);
         statsReOrg = 0;
     }
 
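
The isTraceEnabled() guards removed here and in the hunks that follow are redundant with Log4j2's parameterized messages: trace("... {}", arg) formats nothing when TRACE is disabled, so a guard only pays off when computing the argument itself is expensive. A short sketch of the distinction:

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class LogGuardSketch {
    private static final Logger LOGGER = LogManager.getLogger();

    public static void main(String[] args) {
        int statsReOrg = 3;

        // Redundant guard: the parameterized call below already skips
        // message formatting when TRACE is disabled.
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("reorganized {} times", statsReOrg);
        }

        // Equivalent without the guard; the only cost with TRACE off is
        // the level check inside trace() itself.
        LOGGER.trace("reorganized {} times", statsReOrg);

        // A guard (or a lambda supplier) still helps when building the
        // argument is expensive:
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("state: {}", expensiveDump());
        }
        LOGGER.trace("state: {}", () -> expensiveDump());
    }

    private static String expensiveDump() {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < 1000; i++) {
            sb.append(i).append(',');
        }
        return sb.toString();
    }
}
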
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
index d9b473b..2f2153b 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
@@ -131,10 +131,8 @@
 
         final int numPartitions = getNumOfPartitions(inputDataBytesSize / ctx.getInitialFrameSize(), memoryBudget);
         final int entriesPerPartition = (int) Math.ceil(1.0 * tableSize / numPartitions);
-        if (LOGGER.isTraceEnabled()) {
-            LOGGER.trace("created hashtable, table size:{} file size:{}  #partitions:{}", tableSize, inputDataBytesSize,
-                    numPartitions);
-        }
+        LOGGER.trace("created hashtable, table size:{} file size:{}  #partitions:{}", tableSize, inputDataBytesSize,
+                numPartitions);
 
         final ArrayTupleBuilder outputTupleBuilder = new ArrayTupleBuilder(outRecordDescriptor.getFields().length);
 
@@ -185,10 +183,8 @@
                 if (force || hashTableForTuplePointer.isGarbageCollectionNeeded()) {
                     int numberOfFramesReclaimed =
                             hashTableForTuplePointer.collectGarbage(bufferAccessor, tpcIntermediate);
-                    if (LOGGER.isTraceEnabled()) {
-                        LOGGER.trace("Garbage Collection on Hash table is done. Deallocated frames:{}",
-                                numberOfFramesReclaimed);
-                    }
+                    LOGGER.trace("Garbage Collection on Hash table is done. Deallocated frames:{}",
+                            numberOfFramesReclaimed);
                     return numberOfFramesReclaimed != -1;
                 }
                 return false;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
index e4a5c86..d79af79 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
@@ -527,11 +527,8 @@
                         int tabSize = -1;
                         if ((isLeftOuter || (buildPartSize < probePartSize))) {
                             //Case 1.1 - InMemHJ (without Role-Reversal)
-                            if (LOGGER.isTraceEnabled()) {
-                                LOGGER.trace(
-                                        "\t>>>Case 1.1 (IsLeftOuter || buildSize<probe) AND ApplyInMemHJ - [Level {}]",
-                                        level);
-                            }
+                            LOGGER.trace("\t>>>Case 1.1 (IsLeftOuter || buildSize<probe) AND ApplyInMemHJ - [Level {}]",
+                                    level);
                             tabSize = buildSizeInTuple;
                             if (tabSize == 0) {
                                 throw new HyracksDataException(
@@ -541,11 +538,9 @@
                             applyInMemHashJoin(buildKeys, probeKeys, tabSize, buildRd, probeRd, buildHpc, probeHpc,
                                     buildSideReader, probeSideReader, probComp); // checked-confirmed
                         } else { //Case 1.2 - InMemHJ with Role Reversal
-                            if (LOGGER.isTraceEnabled()) {
-                                LOGGER.trace(
-                                        "\t>>>Case 1.2. (NoIsLeftOuter || probe<build) AND ApplyInMemHJWITH RoleReversal - [Level {}]",
-                                        level);
-                            }
+                            LOGGER.trace(
+                                    "\t>>>Case 1.2. (NoIsLeftOuter || probe<build) AND ApplyInMemHJ WITH RoleReversal - [Level {}]",
+                                    level);
                             tabSize = probeSizeInTuple;
                             if (tabSize == 0) {
                                 throw new HyracksDataException(
@@ -558,24 +553,18 @@
                     }
                     //Apply (Recursive) HHJ
                     else {
-                        if (LOGGER.isTraceEnabled()) {
-                            LOGGER.trace("\t>>>Case 2. ApplyRecursiveHHJ - [Level {}]", level);
-                        }
+                        LOGGER.trace("\t>>>Case 2. ApplyRecursiveHHJ - [Level {}]", level);
                         if ((isLeftOuter || buildPartSize < probePartSize)) {
                             //Case 2.1 - Recursive HHJ (without Role-Reversal)
-                            if (LOGGER.isTraceEnabled()) {
-                                LOGGER.trace(
-                                        "\t\t>>>Case 2.1 - RecursiveHHJ WITH (isLeftOuter || build<probe) - [Level {}]",
-                                        level);
-                            }
+                            LOGGER.trace(
+                                    "\t\t>>>Case 2.1 - RecursiveHHJ WITH (isLeftOuter || build<probe) - [Level {}]",
+                                    level);
                             applyHybridHashJoin((int) buildPartSize, PROBE_REL, BUILD_REL, probeKeys, buildKeys,
                                     probeRd, buildRd, probeHpc, buildHpc, probeSideReader, buildSideReader, level,
                                     beforeMax, probComp);
 
                         } else { //Case 2.2 - Recursive HHJ (with Role-Reversal)
-                            if (LOGGER.isTraceEnabled()) {
-                                LOGGER.trace("\t\t>>>Case 2.2. - RecursiveHHJ WITH RoleReversal - [Level {}]", level);
-                            }
+                            LOGGER.trace("\t\t>>>Case 2.2. - RecursiveHHJ WITH RoleReversal - [Level {}]", level);
 
                             applyHybridHashJoin((int) probePartSize, BUILD_REL, PROBE_REL, buildKeys, probeKeys,
                                     buildRd, probeRd, buildHpc, probeHpc, buildSideReader, probeSideReader, level,
@@ -641,11 +630,9 @@
                         BitSet rPStatus = rHHj.getPartitionStatus();
                         if ((afterMax < (NLJ_SWITCH_THRESHOLD * beforeMax))) {
                             //Case 2.1.1 - Keep applying HHJ
-                            if (LOGGER.isTraceEnabled()) {
-                                LOGGER.trace(
-                                        "\t\t>>>Case 2.1.1 - KEEP APPLYING RecursiveHHJ WITH (isLeftOuter || build<probe) - [Level {}]",
-                                        level);
-                            }
+                            LOGGER.trace(
+                                    "\t\t>>>Case 2.1.1 - KEEP APPLYING RecursiveHHJ WITH (isLeftOuter || build<probe) - [Level {}]",
+                                    level);
                             for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
                                 RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
                                 RunFileReader rprfw = rHHj.getProbeRFReader(rPid);
@@ -674,11 +661,9 @@
                             }
 
                         } else { //Case 2.1.2 - Switch to NLJ
-                            if (LOGGER.isTraceEnabled()) {
-                                LOGGER.trace(
-                                        "\t\t>>>Case 2.1.2 - SWITCHED to NLJ RecursiveHHJ WITH (isLeftOuter || build<probe) - [Level {}]",
-                                        level);
-                            }
+                            LOGGER.trace(
+                                    "\t\t>>>Case 2.1.2 - SWITCHED to NLJ RecursiveHHJ WITH (isLeftOuter || build<probe) - [Level {}]",
+                                    level);
                             for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
                                 RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
                                 RunFileReader rprfw = rHHj.getProbeRFReader(rPid);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractExternalSortRunMerger.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractExternalSortRunMerger.java
index 1beaab8..e573c1c 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractExternalSortRunMerger.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractExternalSortRunMerger.java
@@ -117,19 +117,14 @@
 
                     if (currentGenerationRunAvailable.isEmpty()) {
                         numberOfPasses++;
-                        if (LOGGER.isDebugEnabled()) {
-                            LOGGER.debug("generated runs:" + stop);
-                        }
+                        LOGGER.trace("generated runs: {}", stop);
                         runs.subList(0, stop).clear();
                         currentGenerationRunAvailable.clear();
                         currentGenerationRunAvailable.set(0, runs.size());
                         stop = runs.size();
                     }
                 } else {
-                    if (LOGGER.isDebugEnabled()) {
-                        LOGGER.debug("final runs: {}", stop);
-                        LOGGER.debug("number of passes: " + numberOfPasses);
-                    }
+                    LOGGER.trace("final runs: {}, number of passes: {}", stop, numberOfPasses);
                     merge(finalWriter, partialRuns);
                     break;
                 }
@@ -206,9 +201,7 @@
             }
         } finally {
             merger.close();
-            if (LOGGER.isDebugEnabled()) {
-                LOGGER.debug("Output " + io + " frames");
-            }
+            LOGGER.trace("Output {} frames", io);
         }
     }