Merge branch 'gerrit/neo'

Change-Id: I9e39d970fddd4652f1c459b50c9abb51cb2d0137
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java
index a2b26e7..248e6d0 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java
@@ -36,6 +36,7 @@
 import org.apache.asterix.om.utils.RecordUtil;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.SourceLocation;
+import org.apache.hyracks.util.LogRedactionUtil;
 
 /**
  * A util that can verify if a filter field, a list of partitioning expressions,
@@ -73,7 +74,7 @@
         IAType fieldType = itemType.getSubFieldType(filterField);
         if (fieldType == null) {
             throw new CompilationException(ErrorCode.COMPILATION_FIELD_NOT_FOUND, sourceLoc,
-                    RecordUtil.toFullyQualifiedName(filterField));
+                    LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(filterField)));
         }
         switch (fieldType.getTypeTag()) {
             case TINYINT:
@@ -93,7 +94,7 @@
                 break;
             case UNION:
                 throw new CompilationException(ErrorCode.COMPILATION_FILTER_CANNOT_BE_NULLABLE,
-                        RecordUtil.toFullyQualifiedName(filterField));
+                        LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(filterField)));
             default:
                 throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_FILTER_TYPE,
                         fieldType.getTypeTag().name());
@@ -143,7 +144,8 @@
             IAType fieldType = recType.getSubFieldType(fieldName);
             if (fieldType == null) {
                 String unTypeField = fieldName.get(0) == null ? "" : fieldName.get(0);
-                throw new CompilationException(ErrorCode.COMPILATION_FIELD_NOT_FOUND, sourceLoc, unTypeField);
+                throw new CompilationException(ErrorCode.COMPILATION_FIELD_NOT_FOUND, sourceLoc,
+                        LogRedactionUtil.userData(unTypeField));
             }
             partitioningExprTypes.add(fieldType);
             ATypeTag pkTypeTag = fieldType.getTypeTag();
@@ -159,7 +161,7 @@
                 IAType fieldType = partitioningExprTypes.get(i);
                 if (fieldType == null) {
                     throw new CompilationException(ErrorCode.COMPILATION_FIELD_NOT_FOUND, sourceLoc,
-                            RecordUtil.toFullyQualifiedName(partitioningExpr));
+                            LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(partitioningExpr)));
                 }
                 if (forPrimaryKey) {
                     boolean nullable = KeyFieldTypeUtil.chooseSource(keySourceIndicators, i, recType, metaRecType)
@@ -167,7 +169,8 @@
                     if (nullable) {
                         // key field is nullable
                         throw new CompilationException(ErrorCode.COMPILATION_KEY_CANNOT_BE_NULLABLE, sourceLoc,
-                                keyKindDisplayName, RecordUtil.toFullyQualifiedName(partitioningExpr));
+                                keyKindDisplayName,
+                                LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(partitioningExpr)));
                     }
                 } else {
                     fieldType = TypeComputeUtils.getActualType(fieldType);
@@ -190,7 +193,8 @@
                         break;
                     case UNION:
                         throw new CompilationException(ErrorCode.COMPILATION_KEY_CANNOT_BE_NULLABLE, sourceLoc,
-                                keyKindDisplayName, RecordUtil.toFullyQualifiedName(partitioningExpr));
+                                keyKindDisplayName,
+                                LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(partitioningExpr)));
                     default:
                         throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_KEY_TYPE, sourceLoc,
                                 fieldType.getTypeTag(), keyKindDisplayName);
@@ -236,7 +240,9 @@
                         break;
                     default:
                         throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                "The field '" + displayFieldName + "' which is of type " + fieldType.getTypeTag()
+                                "The field '"
+                                        + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(displayFieldName))
+                                        + "' which is of type " + fieldType.getTypeTag()
                                         + " cannot be indexed using the BTree index.");
                 }
                 break;
@@ -251,14 +257,17 @@
                         break;
                     default:
                         throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                "The field '" + displayFieldName + "' which is of type " + fieldType.getTypeTag()
+                                "The field '"
+                                        + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(displayFieldName))
+                                        + "' which is of type " + fieldType.getTypeTag()
                                         + " cannot be indexed using the RTree index.");
                 }
                 break;
             case LENGTH_PARTITIONED_NGRAM_INVIX:
                 if (fieldType.getTypeTag() != ATypeTag.STRING) {
                     throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                            "The field '" + displayFieldName + "' which is of type " + fieldType.getTypeTag()
+                            "The field '" + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(displayFieldName))
+                                    + "' which is of type " + fieldType.getTypeTag()
                                     + " cannot be indexed using the Length Partitioned N-Gram index.");
                 }
                 break;
@@ -270,14 +279,17 @@
                         break;
                     default:
                         throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                "The field '" + displayFieldName + "' which is of type " + fieldType.getTypeTag()
+                                "The field '"
+                                        + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(displayFieldName))
+                                        + "' which is of type " + fieldType.getTypeTag()
                                         + " cannot be indexed using the Length Partitioned Keyword index.");
                 }
                 break;
             case SINGLE_PARTITION_NGRAM_INVIX:
                 if (fieldType.getTypeTag() != ATypeTag.STRING) {
                     throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                            "The field '" + displayFieldName + "' which is of type " + fieldType.getTypeTag()
+                            "The field '" + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(displayFieldName))
+                                    + "' which is of type " + fieldType.getTypeTag()
                                     + " cannot be indexed using the N-Gram index.");
                 }
                 break;
@@ -289,7 +301,9 @@
                         break;
                     default:
                         throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                "The field '" + displayFieldName + "' which is of type " + fieldType.getTypeTag()
+                                "The field '"
+                                        + LogRedactionUtil.userData(RecordUtil.toFullyQualifiedName(displayFieldName))
+                                        + "' which is of type " + fieldType.getTypeTag()
                                         + " cannot be indexed using the Keyword index.");
                 }
                 break;
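
Review note: every user-supplied field name that reaches a compilation error above is now wrapped in LogRedactionUtil.userData(...) so that deployments with log redaction enabled can strip user data from logged messages; the same change prints displayFieldName as a fully qualified name, which is why the metadata testsuite expectations below read 'loc' instead of '[loc]'. A minimal sketch of the wrapping pattern, assuming only the default identity redactor (the field name and message text are illustrative):

    import org.apache.hyracks.util.LogRedactionUtil;

    public class RedactionSketch {
        public static void main(String[] args) {
            String fieldName = "ssn"; // hypothetical user-defined field name
            // With the default identity redactor this is a no-op; a deployment
            // that installs a redacting ILogRedactor can mask the value before
            // the message reaches the logs.
            String safe = LogRedactionUtil.userData(fieldName);
            System.out.println("The field '" + safe + "' is not found");
        }
    }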
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/StorageCleanupRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/StorageCleanupRequestMessage.java
index 5e52517..281f654 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/StorageCleanupRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/StorageCleanupRequestMessage.java
@@ -21,6 +21,7 @@
 import static org.apache.hyracks.util.ExitUtil.EC_NC_FAILED_TO_NOTIFY_TASKS_COMPLETED;
 
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.asterix.common.api.INcApplicationContext;
@@ -57,10 +58,12 @@
         PersistentLocalResourceRepository localResourceRepository =
                 (PersistentLocalResourceRepository) appContext.getLocalResourceRepository();
         Map<Long, LocalResource> localResources = localResourceRepository.loadAndGetAllResources();
+        Set<Integer> nodePartitions = appContext.getReplicaManager().getPartitions();
         for (LocalResource resource : localResources.values()) {
             DatasetLocalResource lr = (DatasetLocalResource) resource.getResource();
-            if (MetadataIndexImmutableProperties.isMetadataDataset(lr.getDatasetId())) {
-                // skip metadata indexes
+            if (!nodePartitions.contains(lr.getPartition())
+                    || MetadataIndexImmutableProperties.isMetadataDataset(lr.getDatasetId())) {
+                // skip replica partitions and metadata indexes
                 continue;
             }
             if (!validDatasetIds.contains(lr.getDatasetId())) {
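
Review note: cleanup now consults the replica manager, so a node only inspects resources in partitions it owns; resources present here only as replicas are skipped along with the metadata datasets. A self-contained sketch of the filter decision (method and parameter names are illustrative, not the Asterix API):

    import java.util.Set;

    public class CleanupFilterSketch {
        // A resource is a deletion candidate only if it sits in a partition this
        // node owns, is not a metadata dataset, and its dataset id is no longer
        // known to the metadata node.
        static boolean shouldDelete(int partition, int datasetId, boolean isMetadataDataset,
                Set<Integer> ownedPartitions, Set<Integer> validDatasetIds) {
            if (!ownedPartitions.contains(partition) || isMetadataDataset) {
                return false; // skip replica partitions and metadata indexes
            }
            return !validDatasetIds.contains(datasetId);
        }

        public static void main(String[] args) {
            System.out.println(shouldDelete(0, 120, false, Set.of(0, 1), Set.of(101))); // true
            System.out.println(shouldDelete(2, 120, false, Set.of(0, 1), Set.of(101))); // false: replica partition
        }
    }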
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index f7da31d..5e4b730 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -65,6 +65,7 @@
 import org.apache.asterix.app.result.fields.ResultHandlePrinter;
 import org.apache.asterix.app.result.fields.ResultsPrinter;
 import org.apache.asterix.app.result.fields.StatusPrinter;
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.api.IClientRequest;
 import org.apache.asterix.common.api.IMetadataLockManager;
 import org.apache.asterix.common.api.IRequestTracker;
@@ -202,6 +203,7 @@
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.types.TypeSignature;
+import org.apache.asterix.om.utils.RecordUtil;
 import org.apache.asterix.runtime.fulltext.AbstractFullTextFilterDescriptor;
 import org.apache.asterix.runtime.fulltext.FullTextConfigDescriptor;
 import org.apache.asterix.runtime.fulltext.IFullTextFilterDescriptor;
@@ -258,6 +260,7 @@
 import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor.DropOption;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
 import org.apache.hyracks.storage.am.lsm.invertedindex.fulltext.TokenizerCategory;
+import org.apache.hyracks.util.LogRedactionUtil;
 import org.apache.hyracks.util.OptionalBoolean;
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
@@ -829,7 +832,8 @@
                             metadataProvider, mdTxnCtx);
                     ExternalDataUtils.normalize(properties);
                     ExternalDataUtils.validate(properties);
-                    validateExternalDatasetProperties(externalDetails, properties, dd.getSourceLocation(), mdTxnCtx);
+                    validateExternalDatasetProperties(externalDetails, properties, dd.getSourceLocation(), mdTxnCtx,
+                            appCtx);
                     datasetDetails = new ExternalDatasetDetails(externalDetails.getAdapter(), properties, new Date(),
                             TransactionState.COMMIT);
                     break;
@@ -1221,7 +1225,7 @@
                         if (stmtCreateIndex.hasCastDefaultNull()) {
                             throw new CompilationException(ErrorCode.COMPILATION_ERROR,
                                     stmtCreateIndex.getSourceLocation(),
-                                    "CAST modifier is used without specifying " + "the type of the indexed field");
+                                    "CAST modifier is used without specifying the type of the indexed field");
                         }
                         fieldTypePrime = projectTypePrime;
                         fieldTypeNullable = projectTypeNullable;
@@ -1248,7 +1252,10 @@
                                 // allow overriding the type of the closed-field only if CAST modifier is used
                                 if (!stmtCreateIndex.hasCastDefaultNull()) {
                                     throw new CompilationException(ErrorCode.COMPILATION_ERROR,
-                                            indexedElement.getSourceLocation(), "Typed index on '" + projectPath
+                                            indexedElement.getSourceLocation(),
+                                            "Typed index on '"
+                                                    + LogRedactionUtil
+                                                            .userData(RecordUtil.toFullyQualifiedName(projectPath))
                                                     + "' field could be created only for open datatype");
                                 }
                             }
@@ -1629,11 +1636,13 @@
                     }
                     if (existingIndexKeyFieldNames.equals(indexKeyFieldNames)
                             && !existingIndexKeyFieldTypes.equals(indexKeyFieldTypes)) {
+                        String fieldNames = indexKeyFieldNames.stream().map(RecordUtil::toFullyQualifiedName)
+                                .collect(Collectors.joining(","));
                         throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                                 "Cannot create index " + index.getIndexName() + " , enforced index "
-                                        + existingIndex.getIndexName() + " on field '"
-                                        + StringUtils.join(indexKeyFieldNames, ',') + "' is already defined with type '"
-                                        + existingIndexKeyFieldTypes + "'");
+                                        + existingIndex.getIndexName() + " on field(s) '"
+                                        + LogRedactionUtil.userData(fieldNames) + "' is already defined with type(s) '"
+                                        + StringUtils.join(existingIndexKeyFieldTypes, ',') + "'");
                     }
                 }
             }
@@ -4773,13 +4782,13 @@
     }
 
     protected void validateExternalDatasetProperties(ExternalDetailsDecl externalDetails,
-            Map<String, String> properties, SourceLocation srcLoc, MetadataTransactionContext mdTxnCtx)
-            throws AlgebricksException, HyracksDataException {
+            Map<String, String> properties, SourceLocation srcLoc, MetadataTransactionContext mdTxnCtx,
+            IApplicationContext appCtx) throws AlgebricksException, HyracksDataException {
         // Validate adapter specific properties
         String adapter = externalDetails.getAdapter();
         Map<String, String> details = new HashMap<>(properties);
         details.put(ExternalDataConstants.KEY_EXTERNAL_SOURCE_TYPE, adapter);
-        validateAdapterSpecificProperties(details, srcLoc);
+        validateAdapterSpecificProperties(details, srcLoc, appCtx);
     }
 
     /**
@@ -4787,9 +4796,9 @@
      *
      * @param configuration external source properties
      */
-    protected void validateAdapterSpecificProperties(Map<String, String> configuration, SourceLocation srcLoc)
-            throws CompilationException {
-        ExternalDataUtils.validateAdapterSpecificProperties(configuration, srcLoc, warningCollector);
+    protected void validateAdapterSpecificProperties(Map<String, String> configuration, SourceLocation srcLoc,
+            IApplicationContext appCtx) throws CompilationException {
+        ExternalDataUtils.validateAdapterSpecificProperties(configuration, srcLoc, warningCollector, appCtx);
     }
 
     protected enum CreateResult {
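
Review note: the index-collision message switches from List.toString() output to comma-joined fully qualified names, which is what the updated testsuite_sqlpp.xml expectations below match ('value' rather than '[value]'). A sketch of the new formatting, using String.join(".") as a stand-in for RecordUtil.toFullyQualifiedName:

    import java.util.List;
    import java.util.stream.Collectors;

    public class FieldNamesMessageSketch {
        public static void main(String[] args) {
            List<List<String>> indexKeyFieldNames = List.of(List.of("nested", "value"));
            String fieldNames = indexKeyFieldNames.stream()
                    .map(name -> String.join(".", name)) // stand-in for RecordUtil::toFullyQualifiedName
                    .collect(Collectors.joining(","));
            // The old form joined the raw lists, printing '[nested, value]';
            // the new form prints 'nested.value'.
            System.out.println("on field(s) '" + fieldNames + "'");
        }
    }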
diff --git a/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml b/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
index e642922..49e4d74 100644
--- a/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -530,37 +530,37 @@
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_1">
         <output-dir compare="Text">none</output-dir>
-        <expected-error>ASX1079: Compilation error: The field '[loc]' which is of type point cannot be indexed using the BTree index. (in line 37, at column 33)</expected-error>
+        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the BTree index. (in line 37, at column 33)</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_2">
         <output-dir compare="Text">none</output-dir>
-        <expected-error>ASX1079: Compilation error: The field '[age]' which is of type integer cannot be indexed using the RTree index. (in line 37, at column 33)</expected-error>
+        <expected-error>ASX1079: Compilation error: The field 'age' which is of type integer cannot be indexed using the RTree index. (in line 37, at column 33)</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_3">
         <output-dir compare="Text">none</output-dir>
-        <expected-error>ASX1079: Compilation error: The field '[loc]' which is of type point cannot be indexed using the Length Partitioned Keyword index. (in line 37, at column 33)</expected-error>
+        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned Keyword index. (in line 37, at column 33)</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_4">
         <output-dir compare="Text">none</output-dir>
-        <expected-error>ASX1079: Compilation error: The field '[loc]' which is of type point cannot be indexed using the Length Partitioned Keyword index. (in line 37, at column 33)</expected-error>
+        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned Keyword index. (in line 37, at column 33)</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_5">
         <output-dir compare="Text">none</output-dir>
-        <expected-error>ASX1079: Compilation error: The field '[loc]' which is of type point cannot be indexed using the Length Partitioned N-Gram index. (in line 37, at column 33)</expected-error>
+        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned N-Gram index. (in line 37, at column 33)</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
       <compilation-unit name="issue_384_create_index_error_6">
         <output-dir compare="Text">none</output-dir>
-        <expected-error>ASX1079: Compilation error: The field '[loc]' which is of type point cannot be indexed using the Length Partitioned N-Gram index. (in line 37, at column 33)</expected-error>
+        <expected-error>ASX1079: Compilation error: The field 'loc' which is of type point cannot be indexed using the Length Partitioned N-Gram index. (in line 37, at column 33)</expected-error>
       </compilation-unit>
     </test-case>
   </test-group>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.regexadm
index 70092cf..d99f2e9 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.regexadm
@@ -8,6 +8,7 @@
     "active\.memory\.global\.budget" : 67108864,
     "active\.stop\.timeout" : 3600,
     "active\.suspend\.timeout" : 3600,
+    "azure.request.timeout" : 120,
     "compiler\.arrayindex" : true,
     "compiler.batch.lookup" : false,
     "compiler\.external\.field\.pushdown" : true,
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm
index 3faac6f..479de3e 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm
@@ -8,6 +8,7 @@
     "active\.memory\.global\.budget" : 67108864,
     "active\.stop\.timeout" : 3600,
     "active\.suspend\.timeout" : 3600,
+    "azure.request.timeout" : 120,
     "compiler\.arrayindex" : true,
     "compiler.batch.lookup" : false,
     "compiler\.external\.field\.pushdown" : true,
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm
index e42306c..3349f11 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm
@@ -8,6 +8,7 @@
     "active\.memory\.global\.budget" : 67108864,
     "active\.stop\.timeout" : 3600,
     "active\.suspend\.timeout" : 3600,
+    "azure.request.timeout" : 120,
     "compiler\.arrayindex" : true,
     "compiler.batch.lookup" : false,
     "compiler\.external\.field\.pushdown" : true,
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index 14322f2..1267995 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -4335,7 +4335,7 @@
         <expected-error>CAST modifier is only allowed for B-Tree indexes</expected-error>
         <expected-error>CAST modifier cannot be specified together with ENFORCED</expected-error>
         <expected-error>CAST modifier is used without specifying the type of the indexed field</expected-error>
-        <expected-error>Typed index on '[typed_f2]' field could be created only for open datatype</expected-error>
+        <expected-error>Typed index on 'typed_f2' field could be created only for open datatype</expected-error>
         <expected-error>Parameter invalid_date cannot be set</expected-error>
       </compilation-unit>
     </test-case>
@@ -7326,13 +7326,13 @@
       <test-case FilePath="open-index-enforced/error-checking">
         <compilation-unit name="index-type-collision">
           <output-dir compare="Text">index-type-collision</output-dir>
-          <expected-error>Cannot create index testIdx2 , enforced index testIdx1 on field '[value]' is already defined with type '[integer]'</expected-error>
+          <expected-error>Cannot create index testIdx2 , enforced index testIdx1 on field(s) 'value' is already defined with type(s) 'integer'</expected-error>
         </compilation-unit>
       </test-case>
       <test-case FilePath="open-index-enforced/error-checking">
         <compilation-unit name="index-type-promotion-collision">
           <output-dir compare="Text">index-type-promotion-collision</output-dir>
-          <expected-error>Cannot create index testIdx2 , enforced index testIdx1 on field '[value]' is already defined with type '[bigint]'</expected-error>
+          <expected-error>Cannot create index testIdx2 , enforced index testIdx1 on field(s) 'value' is already defined with type(s) 'bigint'</expected-error>
         </compilation-unit>
       </test-case>
       <test-case FilePath="open-index-enforced/error-checking">
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java
index 46258bf..515aad6 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java
@@ -51,7 +51,8 @@
                 StorageUtil.getIntSizeInBytes(200, StorageUtil.StorageUnit.MEGABYTE),
                 "The maximum accepted web request size in bytes"),
         REQUESTS_ARCHIVE_SIZE(NONNEGATIVE_INTEGER, 50, "The maximum number of archived requests to maintain"),
-        LIBRARY_DEPLOY_TIMEOUT(POSITIVE_INTEGER, 1800, "Timeout to upload a UDF in seconds");
+        LIBRARY_DEPLOY_TIMEOUT(POSITIVE_INTEGER, 1800, "Timeout to upload a UDF in seconds"),
+        AZURE_REQUEST_TIMEOUT(POSITIVE_INTEGER, 120, "Timeout for Azure client requests in seconds");
 
         private final IOptionType type;
         private final Object defaultValue;
@@ -78,6 +79,7 @@
                 case MAX_WAIT_ACTIVE_CLUSTER:
                 case MAX_WEB_REQUEST_SIZE:
                 case LIBRARY_DEPLOY_TIMEOUT:
+                case AZURE_REQUEST_TIMEOUT:
                     return Section.COMMON;
                 case CC_JAVA_OPTS:
                 case NC_JAVA_OPTS:
@@ -155,4 +157,7 @@
         return accessor.getInt(Option.LIBRARY_DEPLOY_TIMEOUT);
     }
 
+    public int getAzureRequestTimeout() {
+        return accessor.getInt(Option.AZURE_REQUEST_TIMEOUT);
+    }
 }
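
Review note: the new option surfaces as azure.request.timeout (seconds, default 120) and registers under Section.COMMON, which is why it shows up in all three cluster-state regex fixtures above. Assuming the usual ini-style cc.conf layout, an override would look like:

    [common]
    azure.request.timeout = 300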
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/blob/AzureBlobInputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/blob/AzureBlobInputStream.java
index b402f25..cdb3834 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/blob/AzureBlobInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/blob/AzureBlobInputStream.java
@@ -25,6 +25,7 @@
 import java.util.Map;
 import java.util.zip.GZIPInputStream;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
@@ -45,9 +46,10 @@
     private final BlobServiceClient client;
     private final String container;
 
-    public AzureBlobInputStream(Map<String, String> configuration, List<String> filePaths) throws HyracksDataException {
+    public AzureBlobInputStream(IApplicationContext appCtx, Map<String, String> configuration, List<String> filePaths)
+            throws HyracksDataException {
         super(configuration, filePaths);
-        this.client = buildAzureClient(configuration);
+        this.client = buildAzureClient(appCtx, configuration);
         this.container = configuration.get(ExternalDataConstants.CONTAINER_NAME_FIELD_NAME);
     }
 
@@ -81,9 +83,10 @@
         return true;
     }
 
-    private BlobServiceClient buildAzureClient(Map<String, String> configuration) throws HyracksDataException {
+    private BlobServiceClient buildAzureClient(IApplicationContext appCtx, Map<String, String> configuration)
+            throws HyracksDataException {
         try {
-            return ExternalDataUtils.Azure.buildAzureBlobClient(configuration);
+            return ExternalDataUtils.Azure.buildAzureBlobClient(appCtx, configuration);
         } catch (CompilationException ex) {
             throw HyracksDataException.create(ex);
         }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/blob/AzureBlobInputStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/blob/AzureBlobInputStreamFactory.java
index bf904a4..064b319 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/blob/AzureBlobInputStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/blob/AzureBlobInputStreamFactory.java
@@ -23,6 +23,7 @@
 import java.util.Map;
 import java.util.PriorityQueue;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.external.api.AsterixInputStream;
 import org.apache.asterix.external.input.record.reader.abstracts.AbstractExternalInputStreamFactory;
 import org.apache.asterix.external.util.ExternalDataUtils;
@@ -41,7 +42,10 @@
 
     @Override
     public AsterixInputStream createInputStream(IHyracksTaskContext ctx, int partition) throws HyracksDataException {
-        return new AzureBlobInputStream(configuration, partitionWorkLoadsBasedOnSize.get(partition).getFilePaths());
+        IApplicationContext appCtx =
+                (IApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+        return new AzureBlobInputStream(appCtx, configuration,
+                partitionWorkLoadsBasedOnSize.get(partition).getFilePaths());
     }
 
     @Override
@@ -49,10 +53,11 @@
             throws AlgebricksException {
         super.configure(ctx, configuration, warningCollector);
 
+        IApplicationContext appCtx = (IApplicationContext) ctx.getApplicationContext();
         // Ensure the validity of include/exclude
         ExternalDataUtils.validateIncludeExclude(configuration);
         IncludeExcludeMatcher includeExcludeMatcher = ExternalDataUtils.getIncludeExcludeMatchers(configuration);
-        BlobServiceClient blobServiceClient = ExternalDataUtils.Azure.buildAzureBlobClient(configuration);
+        BlobServiceClient blobServiceClient = ExternalDataUtils.Azure.buildAzureBlobClient(appCtx, configuration);
         List<BlobItem> filesOnly = ExternalDataUtils.Azure.listBlobItems(blobServiceClient, configuration,
                 includeExcludeMatcher, warningCollector);
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/datalake/AzureDataLakeInputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/datalake/AzureDataLakeInputStream.java
index b7d142f..e34d188 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/datalake/AzureDataLakeInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/datalake/AzureDataLakeInputStream.java
@@ -25,6 +25,7 @@
 import java.util.Map;
 import java.util.zip.GZIPInputStream;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
@@ -45,10 +46,10 @@
     private final DataLakeServiceClient client;
     private final String container;
 
-    public AzureDataLakeInputStream(Map<String, String> configuration, List<String> filePaths)
-            throws HyracksDataException {
+    public AzureDataLakeInputStream(IApplicationContext appCtx, Map<String, String> configuration,
+            List<String> filePaths) throws HyracksDataException {
         super(configuration, filePaths);
-        this.client = buildAzureClient(configuration);
+        this.client = buildAzureClient(appCtx, configuration);
         this.container = configuration.get(ExternalDataConstants.CONTAINER_NAME_FIELD_NAME);
     }
 
@@ -82,9 +83,10 @@
         return true;
     }
 
-    private DataLakeServiceClient buildAzureClient(Map<String, String> configuration) throws HyracksDataException {
+    private DataLakeServiceClient buildAzureClient(IApplicationContext appCtx, Map<String, String> configuration)
+            throws HyracksDataException {
         try {
-            return ExternalDataUtils.Azure.buildAzureDatalakeClient(configuration);
+            return ExternalDataUtils.Azure.buildAzureDatalakeClient(appCtx, configuration);
         } catch (CompilationException ex) {
             throw HyracksDataException.create(ex);
         }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/datalake/AzureDataLakeInputStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/datalake/AzureDataLakeInputStreamFactory.java
index e145e1f..e9f8d4c 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/datalake/AzureDataLakeInputStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/datalake/AzureDataLakeInputStreamFactory.java
@@ -23,6 +23,7 @@
 import java.util.Map;
 import java.util.PriorityQueue;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.external.api.AsterixInputStream;
 import org.apache.asterix.external.input.record.reader.abstracts.AbstractExternalInputStreamFactory;
 import org.apache.asterix.external.util.ExternalDataUtils;
@@ -41,7 +42,10 @@
 
     @Override
     public AsterixInputStream createInputStream(IHyracksTaskContext ctx, int partition) throws HyracksDataException {
-        return new AzureDataLakeInputStream(configuration, partitionWorkLoadsBasedOnSize.get(partition).getFilePaths());
+        IApplicationContext appCtx =
+                (IApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+        return new AzureDataLakeInputStream(appCtx, configuration,
+                partitionWorkLoadsBasedOnSize.get(partition).getFilePaths());
     }
 
     @Override
@@ -49,10 +53,11 @@
             throws AlgebricksException {
         super.configure(ctx, configuration, warningCollector);
 
+        IApplicationContext appCtx = (IApplicationContext) ctx.getApplicationContext();
         // Ensure the validity of include/exclude
         ExternalDataUtils.validateIncludeExclude(configuration);
         IncludeExcludeMatcher includeExcludeMatcher = ExternalDataUtils.getIncludeExcludeMatchers(configuration);
-        DataLakeServiceClient client = ExternalDataUtils.Azure.buildAzureDatalakeClient(configuration);
+        DataLakeServiceClient client = ExternalDataUtils.Azure.buildAzureDatalakeClient(appCtx, configuration);
         List<PathItem> filesOnly = ExternalDataUtils.Azure.listDatalakePathItems(client, configuration,
                 includeExcludeMatcher, warningCollector);
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/parquet/AzureBlobParquetReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/parquet/AzureBlobParquetReaderFactory.java
index 1f82dae..c2251df 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/parquet/AzureBlobParquetReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/parquet/AzureBlobParquetReaderFactory.java
@@ -23,6 +23,7 @@
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.external.input.HDFSDataSourceFactory;
 import org.apache.asterix.external.input.record.reader.abstracts.AbstractExternalInputStreamFactory.IncludeExcludeMatcher;
@@ -45,7 +46,8 @@
     @Override
     public void configure(IServiceContext serviceCtx, Map<String, String> configuration,
             IWarningCollector warningCollector) throws AlgebricksException, HyracksDataException {
-        BlobServiceClient blobServiceClient = ExternalDataUtils.Azure.buildAzureBlobClient(configuration);
+        IApplicationContext appCtx = (IApplicationContext) serviceCtx.getApplicationContext();
+        BlobServiceClient blobServiceClient = ExternalDataUtils.Azure.buildAzureBlobClient(appCtx, configuration);
         //Get endpoint
         String endPoint = extractEndPoint(blobServiceClient.getAccountUrl());
         //Get path
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/parquet/AzureDataLakeParquetReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/parquet/AzureDataLakeParquetReaderFactory.java
index 8474a74..db87868 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/parquet/AzureDataLakeParquetReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/azure/parquet/AzureDataLakeParquetReaderFactory.java
@@ -23,6 +23,7 @@
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.external.input.HDFSDataSourceFactory;
 import org.apache.asterix.external.input.record.reader.abstracts.AbstractExternalInputStreamFactory.IncludeExcludeMatcher;
@@ -45,7 +46,9 @@
     @Override
     public void configure(IServiceContext serviceCtx, Map<String, String> configuration,
             IWarningCollector warningCollector) throws AlgebricksException, HyracksDataException {
-        DataLakeServiceClient dataLakeServiceClient = ExternalDataUtils.Azure.buildAzureDatalakeClient(configuration);
+        IApplicationContext appCtx = (IApplicationContext) serviceCtx.getApplicationContext();
+        DataLakeServiceClient dataLakeServiceClient =
+                ExternalDataUtils.Azure.buildAzureDatalakeClient(appCtx, configuration);
 
         //Get endpoint
         String endPoint = extractEndPoint(dataLakeServiceClient.getAccountUrl());
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AbstractMultipleInputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AbstractMultipleInputStream.java
index 8f032d8..18ef150 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AbstractMultipleInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AbstractMultipleInputStream.java
@@ -21,10 +21,13 @@
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.external.api.AsterixInputStream;
 import org.apache.asterix.external.api.IStreamNotificationHandler;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.util.ExceptionUtils;
 
 /**
  * Base class for a source stream that is composed of multiple separate input streams. Reading proceeds one stream at
@@ -54,25 +57,30 @@
 
     @Override
     public final int read(byte[] b, int off, int len) throws IOException {
-        if (in == null) {
-            if (!advance()) {
-                return -1;
+        try {
+            if (in == null) {
+                if (!advance()) {
+                    return -1;
+                }
             }
+            int result = in.read(b, off, len);
+            if (result < 0 && (lastByte != ExternalDataConstants.BYTE_LF)
+                    && (lastByte != ExternalDataConstants.BYTE_CR)) {
+                // Return a newline at the end of every file. Note: this may cause
+                // problems for some parsers, depending on their implementation.
+                lastByte = ExternalDataConstants.BYTE_LF;
+                b[off] = ExternalDataConstants.BYTE_LF;
+                return 1;
+            }
+            while ((result < 0) && advance()) {
+                result = in.read(b, off, len);
+            }
+            if (result > 0) {
+                lastByte = b[(off + result) - 1];
+            }
+            return result;
+        } catch (Exception e) {
+            throw RuntimeDataException.create(ErrorCode.EXTERNAL_SOURCE_ERROR, ExceptionUtils.getMessageOrToString(e));
         }
-        int result = in.read(b, off, len);
-        if (result < 0 && (lastByte != ExternalDataConstants.BYTE_LF) && (lastByte != ExternalDataConstants.BYTE_CR)) {
-            // return a new line at the end of every file <--Might create problems for some cases
-            // depending on the parser implementation-->
-            lastByte = ExternalDataConstants.BYTE_LF;
-            b[off] = ExternalDataConstants.BYTE_LF;
-            return 1;
-        }
-        while ((result < 0) && advance()) {
-            result = in.read(b, off, len);
-        }
-        if (result > 0) {
-            lastByte = b[(off + result) - 1];
-        }
-        return result;
     }
 }
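
Review note: read() keeps its existing quirk (a synthetic LF is returned at each file boundary so the last record of one file cannot fuse with the first record of the next), but any underlying failure now surfaces as a RuntimeDataException tagged EXTERNAL_SOURCE_ERROR rather than a raw IOException. A stripped-down, runnable model of the boundary behavior, using plain JDK types rather than the Asterix classes:

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Iterator;
    import java.util.List;

    public class MultiStreamDemo {
        public static void main(String[] args) throws IOException {
            Iterator<InputStream> streams = List.<InputStream>of(
                    new ByteArrayInputStream("rec1".getBytes()),
                    new ByteArrayInputStream("rec2\n".getBytes())).iterator();
            InputStream in = streams.next();
            byte lastByte = 0;
            byte[] b = new byte[8];
            StringBuilder out = new StringBuilder();
            while (true) {
                int n = in.read(b, 0, b.length);
                if (n < 0 && lastByte != '\n' && lastByte != '\r') {
                    // the stream ended without a newline: emit a synthetic one
                    lastByte = '\n';
                    out.append('\n');
                    continue;
                }
                while (n < 0 && streams.hasNext()) { // advance to the next file
                    in = streams.next();
                    n = in.read(b, 0, b.length);
                }
                if (n < 0) {
                    break;
                }
                lastByte = b[n - 1];
                out.append(new String(b, 0, n));
            }
            System.out.print(out); // "rec1\nrec2\n": records stay separated
        }
    }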
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
index 22040e2..8e38eed 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
@@ -90,6 +90,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
@@ -139,6 +140,7 @@
 import com.azure.storage.blob.models.BlobItem;
 import com.azure.storage.blob.models.ListBlobsOptions;
 import com.azure.storage.common.StorageSharedKeyCredential;
+import com.azure.storage.common.policy.RequestRetryOptions;
 import com.azure.storage.file.datalake.DataLakeFileSystemClient;
 import com.azure.storage.file.datalake.DataLakeServiceClient;
 import com.azure.storage.file.datalake.DataLakeServiceClientBuilder;
@@ -597,7 +599,7 @@
      * @param configuration properties
      */
     public static void validateAdapterSpecificProperties(Map<String, String> configuration, SourceLocation srcLoc,
-            IWarningCollector collector) throws CompilationException {
+            IWarningCollector collector, IApplicationContext appCtx) throws CompilationException {
         String type = configuration.get(ExternalDataConstants.KEY_EXTERNAL_SOURCE_TYPE);
 
         switch (type) {
@@ -605,10 +607,10 @@
                 AwsS3.validateProperties(configuration, srcLoc, collector);
                 break;
             case ExternalDataConstants.KEY_ADAPTER_NAME_AZURE_BLOB:
-                Azure.validateAzureBlobProperties(configuration, srcLoc, collector);
+                Azure.validateAzureBlobProperties(configuration, srcLoc, collector, appCtx);
                 break;
             case ExternalDataConstants.KEY_ADAPTER_NAME_AZURE_DATA_LAKE:
-                Azure.validateAzureDataLakeProperties(configuration, srcLoc, collector);
+                Azure.validateAzureDataLakeProperties(configuration, srcLoc, collector, appCtx);
                 break;
             case KEY_ADAPTER_NAME_GCS:
                 GCS.validateProperties(configuration, srcLoc, collector);
@@ -1277,8 +1279,8 @@
          * @param configuration properties
          * @return client
          */
-        public static BlobServiceClient buildAzureBlobClient(Map<String, String> configuration)
-                throws CompilationException {
+        public static BlobServiceClient buildAzureBlobClient(IApplicationContext appCtx,
+                Map<String, String> configuration) throws CompilationException {
             String managedIdentityId = configuration.get(MANAGED_IDENTITY_ID_FIELD_NAME);
             String accountName = configuration.get(ACCOUNT_NAME_FIELD_NAME);
             String accountKey = configuration.get(ACCOUNT_KEY_FIELD_NAME);
@@ -1292,6 +1294,9 @@
 
             // Client builder
             BlobServiceClientBuilder builder = new BlobServiceClientBuilder();
+            int timeout = appCtx.getExternalProperties().getAzureRequestTimeout();
+            RequestRetryOptions requestRetryOptions = new RequestRetryOptions(null, null, timeout, null, null, null);
+            builder.retryOptions(requestRetryOptions);
 
             // Endpoint is required
             if (endpoint == null) {
@@ -1428,8 +1433,8 @@
          * @param configuration properties
          * @return client
          */
-        public static DataLakeServiceClient buildAzureDatalakeClient(Map<String, String> configuration)
-                throws CompilationException {
+        public static DataLakeServiceClient buildAzureDatalakeClient(IApplicationContext appCtx,
+                Map<String, String> configuration) throws CompilationException {
             String managedIdentityId = configuration.get(MANAGED_IDENTITY_ID_FIELD_NAME);
             String accountName = configuration.get(ACCOUNT_NAME_FIELD_NAME);
             String accountKey = configuration.get(ACCOUNT_KEY_FIELD_NAME);
@@ -1443,6 +1448,9 @@
 
             // Client builder
             DataLakeServiceClientBuilder builder = new DataLakeServiceClientBuilder();
+            int timeout = appCtx.getExternalProperties().getAzureRequestTimeout();
+            RequestRetryOptions requestRetryOptions = new RequestRetryOptions(null, null, timeout, null, null, null);
+            builder.retryOptions(requestRetryOptions);
 
             // Endpoint is required
             if (endpoint == null) {
@@ -1702,7 +1710,7 @@
          * @throws CompilationException Compilation exception
          */
         public static void validateAzureBlobProperties(Map<String, String> configuration, SourceLocation srcLoc,
-                IWarningCollector collector) throws CompilationException {
+                IWarningCollector collector, IApplicationContext appCtx) throws CompilationException {
 
             // check if the format property is present
             if (configuration.get(ExternalDataConstants.KEY_FORMAT) == null) {
@@ -1715,7 +1723,7 @@
             BlobServiceClient blobServiceClient;
             try {
                 String container = configuration.get(ExternalDataConstants.CONTAINER_NAME_FIELD_NAME);
-                blobServiceClient = buildAzureBlobClient(configuration);
+                blobServiceClient = buildAzureBlobClient(appCtx, configuration);
                 BlobContainerClient blobContainer = blobServiceClient.getBlobContainerClient(container);
 
                 // Get all objects in a container and extract the paths to files
@@ -1741,7 +1749,7 @@
          * @throws CompilationException Compilation exception
          */
         public static void validateAzureDataLakeProperties(Map<String, String> configuration, SourceLocation srcLoc,
-                IWarningCollector collector) throws CompilationException {
+                IWarningCollector collector, IApplicationContext appCtx) throws CompilationException {
 
             // check if the format property is present
             if (configuration.get(ExternalDataConstants.KEY_FORMAT) == null) {
@@ -1754,7 +1762,7 @@
             DataLakeServiceClient dataLakeServiceClient;
             try {
                 String container = configuration.get(ExternalDataConstants.CONTAINER_NAME_FIELD_NAME);
-                dataLakeServiceClient = buildAzureDatalakeClient(configuration);
+                dataLakeServiceClient = buildAzureDatalakeClient(appCtx, configuration);
                 DataLakeFileSystemClient fileSystemClient = dataLakeServiceClient.getFileSystemClient(container);
 
                 // Get all objects in a container and extract the paths to files
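
Review note: both client builders now pass the configured timeout as the third RequestRetryOptions argument (tryTimeoutInSeconds in the azure-storage SDK); the null arguments keep the SDK defaults for retry policy, max tries, retry delays, and secondary host. A sketch of the blob side in isolation, with a placeholder endpoint:

    import com.azure.storage.blob.BlobServiceClient;
    import com.azure.storage.blob.BlobServiceClientBuilder;
    import com.azure.storage.common.policy.RequestRetryOptions;

    public class AzureTimeoutSketch {
        public static BlobServiceClient buildClient(int timeoutSecs) {
            // (retryPolicyType, maxTries, tryTimeoutInSeconds,
            //  retryDelayInMs, maxRetryDelayInMs, secondaryHost)
            RequestRetryOptions retry = new RequestRetryOptions(null, null, timeoutSecs, null, null, null);
            return new BlobServiceClientBuilder()
                    .endpoint("https://myaccount.blob.core.windows.net") // placeholder endpoint
                    .retryOptions(retry)
                    .buildClient();
        }
    }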
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/ViewUtil.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/ViewUtil.java
index cb704b2..975a387 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/ViewUtil.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/ViewUtil.java
@@ -54,6 +54,7 @@
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.IWarningCollector;
 import org.apache.hyracks.api.exceptions.SourceLocation;
+import org.apache.hyracks.util.LogRedactionUtil;
 
 public final class ViewUtil {
 
@@ -133,7 +134,8 @@
                 AUnionType unionType = (AUnionType) fieldType;
                 if (!unionType.isNullableType()) {
                     throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                            String.format("Invalid type for field %s. Optional type must allow NULL", fieldNames[i]));
+                            String.format("Invalid type for field %s. Optional type must allow NULL",
+                                    LogRedactionUtil.userData(fieldNames[i])));
                 }
                 primeType = unionType.getActualType();
             } else {
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
index 5201a6a..6e62262 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
@@ -54,6 +54,7 @@
 import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.api.exceptions.SourceLocation;
+import org.apache.hyracks.util.LogRedactionUtil;
 
 /**
  * Provider utility methods for data types
@@ -225,7 +226,8 @@
                     if (typeIntermediate == null) {
                         String fName = String.join(".", subFieldName);
                         throw new AsterixException(ErrorCode.COMPILATION_ERROR,
-                                "No list item type found. Wrong type given from field " + fName);
+                                "No list item type found. Wrong type given from field "
+                                        + LogRedactionUtil.userData(fName));
                     }
                     subFieldName.add(keyFieldNames.get(i));
                 }
@@ -243,8 +245,8 @@
                 ATypeTag tt = TypeComputeUtils.getActualType(typeIntermediate).getTypeTag();
                 if (tt != ATypeTag.OBJECT && tt != ATypeTag.ARRAY && tt != ATypeTag.MULTISET) {
                     String fName = String.join(".", subFieldName);
-                    throw new AsterixException(ErrorCode.COMPILATION_ERROR,
-                            "Field accessor is not defined for \"" + fName + "\" of type " + tt);
+                    throw new AsterixException(ErrorCode.COMPILATION_ERROR, "Field accessor is not defined for '"
+                            + LogRedactionUtil.userData(fName) + "' of type " + tt);
                 }
             }
 
@@ -309,8 +311,9 @@
                     recordNameTypesMap.put(keyFieldNames.get(keyFieldNames.size() - 1),
                             AUnionType.createNullableType(nestArrayType(keyFieldType, isKeyTypeWithUnnest)));
                 } else if (!ATypeHierarchy.canPromote(enforcedFieldType.getTypeTag(), this.keyFieldType.getTypeTag())) {
-                    throw new AsterixException(ErrorCode.COMPILATION_ERROR, "Cannot enforce field \""
-                            + String.join(".", this.keyFieldNames) + "\" to have type " + this.keyFieldType);
+                    throw new AsterixException(ErrorCode.COMPILATION_ERROR,
+                            "Cannot enforce field '" + LogRedactionUtil.userData(String.join(".", this.keyFieldNames))
+                                    + "' to have type " + this.keyFieldType);
                 }
             } else {
                 recordNameTypesMap.put(keyFieldNames.get(keyFieldNames.size() - 1),
@@ -533,8 +536,8 @@
         IAType actualType = TypeComputeUtils.getActualType(nestedRecordType);
         if (actualType.getTypeTag() != ATypeTag.OBJECT) {
             String fName = String.join(".", fieldName);
-            throw new AsterixException(ErrorCode.COMPILATION_ERROR,
-                    "Field accessor is not defined for \"" + fName + "\" of type " + actualType.getTypeTag());
+            throw new AsterixException(ErrorCode.COMPILATION_ERROR, "Field accessor is not defined for '"
+                    + LogRedactionUtil.userData(fName) + "' of type " + actualType.getTypeTag());
         }
     }