Merge branch 'release-0.9.1'

Change-Id: I4f05c7ceb1de1ed5c38d7c30e82c2ab1ee4323ba
diff --git a/README.md b/README.md
index 105286d..013f905 100644
--- a/README.md
+++ b/README.md
@@ -23,10 +23,13 @@
 AsterixDB is a BDMS (Big Data Management System) with a rich feature set that sets it apart from other Big Data platforms.  Its feature set makes it well-suited to modern needs such as web data warehousing and social data storage and analysis. AsterixDB has:
 
 - __Data model__<br/>
-A semistructured NoSQL style data model (ADM) resulting from extending JSON with object database ideas
+A semistructured NoSQL style data model ([ADM](https://ci.apache.org/projects/asterixdb/datamodel.html)) resulting from
+extending JSON with object database ideas
 
 - __Query languages__<br/>
-Two expressive and declarative query languages (SQL++ and AQL) that support a broad range of queries and analysis over semistructured data
+Two expressive and declarative query languages ([SQL++](http://asterixdb.apache.org/docs/0.9.0/sqlpp/manual.html)
+and [AQL](http://asterixdb.apache.org/docs/0.9.0/aql/manual.html)) that support a broad range of queries and analysis
+over semistructured data
 
 - __Scalability__<br/>
 A parallel runtime query execution engine, Apache Hyracks, that has been scale-tested on up to 1000+ cores and 500+ disks
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveJobNotificationHandler.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveJobNotificationHandler.java
index d7998f8..b4ed8e5 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveJobNotificationHandler.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveJobNotificationHandler.java
@@ -30,7 +30,6 @@
 import org.apache.hyracks.api.job.JobSpecification;
 
 public class ActiveJobNotificationHandler implements Runnable {
-    public static final ActiveJobNotificationHandler INSTANCE = new ActiveJobNotificationHandler();
     public static final String ACTIVE_ENTITY_PROPERTY_NAME = "ActiveJob";
     private static final Logger LOGGER = Logger.getLogger(ActiveJobNotificationHandler.class.getName());
     private static final boolean DEBUG = false;
@@ -38,7 +37,7 @@
     private final Map<EntityId, IActiveEntityEventsListener> entityEventListeners;
     private final Map<JobId, EntityId> jobId2ActiveJobInfos;
 
-    private ActiveJobNotificationHandler() {
+    public ActiveJobNotificationHandler() {
         this.eventInbox = new LinkedBlockingQueue<>();
         this.jobId2ActiveJobInfos = new HashMap<>();
         this.entityEventListeners = new HashMap<>();
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveLifecycleListener.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveLifecycleListener.java
index 6a10b0c..86c3e7d 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveLifecycleListener.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveLifecycleListener.java
@@ -34,20 +34,21 @@
 public class ActiveLifecycleListener implements IJobLifecycleListener {
 
     private static final Logger LOGGER = Logger.getLogger(ActiveLifecycleListener.class.getName());
-    public static final ActiveLifecycleListener INSTANCE = new ActiveLifecycleListener();
 
+    private final ActiveJobNotificationHandler notificationHandler;
     private final LinkedBlockingQueue<ActiveEvent> jobEventInbox;
     private final ExecutorService executorService;
 
-    private ActiveLifecycleListener() {
-        jobEventInbox = ActiveJobNotificationHandler.INSTANCE.getEventInbox();
+    public ActiveLifecycleListener() {
+        notificationHandler = new ActiveJobNotificationHandler();
+        jobEventInbox = notificationHandler.getEventInbox();
         executorService = Executors.newSingleThreadExecutor();
-        executorService.execute(ActiveJobNotificationHandler.INSTANCE);
+        executorService.execute(notificationHandler);
     }
 
     @Override
     public synchronized void notifyJobStart(JobId jobId) throws HyracksException {
-        EntityId entityId = ActiveJobNotificationHandler.INSTANCE.getEntity(jobId);
+        EntityId entityId = notificationHandler.getEntity(jobId);
         if (entityId != null) {
             jobEventInbox.add(new ActiveEvent(jobId, Kind.JOB_STARTED, entityId));
         }
@@ -55,7 +56,7 @@
 
     @Override
     public synchronized void notifyJobFinish(JobId jobId) throws HyracksException {
-        EntityId entityId = ActiveJobNotificationHandler.INSTANCE.getEntity(jobId);
+        EntityId entityId = notificationHandler.getEntity(jobId);
         if (entityId != null) {
             jobEventInbox.add(new ActiveEvent(jobId, Kind.JOB_FINISHED, entityId));
         } else {
@@ -67,7 +68,7 @@
 
     @Override
     public void notifyJobCreation(JobId jobId, JobSpecification spec) throws HyracksException {
-        ActiveJobNotificationHandler.INSTANCE.notifyJobCreation(jobId, spec);
+        notificationHandler.notifyJobCreation(jobId, spec);
     }
 
     public void receive(ActivePartitionMessage message) {
@@ -78,4 +79,8 @@
     public void stop() {
         executorService.shutdown();
     }
+
+    public ActiveJobNotificationHandler getNotificationHandler() {
+        return notificationHandler;
+    }
 }
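
A note on the two hunks above: this change removes the process-wide singletons (ActiveJobNotificationHandler.INSTANCE and ActiveLifecycleListener.INSTANCE) in favor of instance wiring, with the listener constructing and owning its handler. A minimal sketch of the resulting call pattern, using only names from the diff (the surrounding setup is assumed):

```java
// Per-instance wiring replaces the removed INSTANCE fields: the listener
// creates the handler in its constructor and exposes it via a getter.
ActiveLifecycleListener listener = new ActiveLifecycleListener();

// Collaborators that previously called ActiveJobNotificationHandler.INSTANCE
// now reach the handler through its owning listener.
ActiveJobNotificationHandler handler = listener.getNotificationHandler();
EntityId entityId = handler.getEntity(jobId); // jobId supplied by the caller
```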
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveSourceOperatorNodePushable.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveSourceOperatorNodePushable.java
index ac3caf3..98a6979 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveSourceOperatorNodePushable.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveSourceOperatorNodePushable.java
@@ -22,7 +22,7 @@
 import java.util.logging.Logger;
 
 import org.apache.asterix.active.message.ActivePartitionMessage;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -40,7 +40,7 @@
 
     public ActiveSourceOperatorNodePushable(IHyracksTaskContext ctx, ActiveRuntimeId runtimeId) {
         this.ctx = ctx;
-        activeManager = (ActiveManager) ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext()
+        activeManager = (ActiveManager) ((INcApplicationContext) ctx.getJobletContext().getServiceContext()
                 .getApplicationContext()).getActiveManager();
         this.runtimeId = runtimeId;
     }
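
IAppRuntimeContext is renamed to INcApplicationContext throughout this patch. Spelled out, the NC-side lookup chain used in the constructor above looks as follows (a sketch; ctx is the IHyracksTaskContext the operator already holds):

```java
// From a Hyracks task context down to the node-local ActiveManager.
INcApplicationContext appCtx = (INcApplicationContext) ctx.getJobletContext()
        .getServiceContext().getApplicationContext();
ActiveManager activeManager = (ActiveManager) appCtx.getActiveManager();
```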
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActiveManagerMessage.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActiveManagerMessage.java
index da3ac1c..231ec25 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActiveManagerMessage.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActiveManagerMessage.java
@@ -21,12 +21,11 @@
 import java.io.Serializable;
 
 import org.apache.asterix.active.ActiveManager;
-import org.apache.asterix.common.api.IAppRuntimeContext;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 
-public class ActiveManagerMessage implements IApplicationMessage {
+public class ActiveManagerMessage implements INcAddressedMessage {
     public static final byte STOP_ACTIVITY = 0x00;
 
     private static final long serialVersionUID = 1L;
@@ -53,9 +52,8 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
-        ((ActiveManager) appContext.getActiveManager()).submit(this);
+    public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        ((ActiveManager) appCtx.getActiveManager()).submit(this);
     }
 
     @Override
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActivePartitionMessage.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActivePartitionMessage.java
index 3c7aa06..335121a 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActivePartitionMessage.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActivePartitionMessage.java
@@ -22,12 +22,12 @@
 
 import org.apache.asterix.active.ActiveLifecycleListener;
 import org.apache.asterix.active.ActiveRuntimeId;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.service.IControllerService;
 
-public class ActivePartitionMessage implements IApplicationMessage {
+public class ActivePartitionMessage implements ICcAddressedMessage {
 
     public static final byte ACTIVE_RUNTIME_REGISTERED = 0x00;
     public static final byte ACTIVE_RUNTIME_DEREGISTERED = 0x01;
@@ -66,8 +66,9 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        ActiveLifecycleListener.INSTANCE.receive(this);
+    public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+        activeListener.receive(this);
     }
 
     @Override
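
Both message classes move off the generic IApplicationMessage, whose handle(IControllerService) forced each handler to cast its way to the right context, onto controller-addressed interfaces whose handle() receives a correctly typed application context. A sketch of the two interfaces as this patch uses them; the signatures are inferred from the diff and the actual declarations may differ:

```java
import java.io.Serializable;

// Handled on a node controller (NC).
interface INcAddressedMessage extends Serializable {
    void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException;
}

// Handled on the cluster controller (CC).
interface ICcAddressedMessage extends Serializable {
    void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException;
}
```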
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
index 5eae36b..3d7ba34 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.asterix.common.dataflow.IApplicationContextInfo;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.declared.DataSourceId;
@@ -234,7 +234,7 @@
                     invertedIndexFieldsForNonBulkLoadOps[k] = k;
                 }
             }
-            IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+            ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
             Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint =
                     metadataProvider.getSplitProviderAndConstraints(dataset, indexName);
             // TODO: Here we assume there is only one search key field.
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/DefaultRuleSetFactory.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/DefaultRuleSetFactory.java
index 32d766b..2db5d9a 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/DefaultRuleSetFactory.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/DefaultRuleSetFactory.java
@@ -21,6 +21,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.optimizer.base.RuleCollections;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
@@ -32,17 +33,19 @@
 public class DefaultRuleSetFactory implements IRuleSetFactory {
 
     @Override
-    public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getLogicalRewrites()
-            throws AlgebricksException {
-        return buildLogical();
+    public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getLogicalRewrites(
+            ICcApplicationContext appCtx) throws AlgebricksException {
+        return buildLogical(appCtx);
     }
 
     @Override
-    public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getPhysicalRewrites() {
-        return buildPhysical();
+    public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getPhysicalRewrites(
+            ICcApplicationContext appCtx) {
+        return buildPhysical(appCtx);
     }
 
-    public static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildLogical() {
+    public static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildLogical(
+            ICcApplicationContext appCtx) {
         List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultLogicalRewrites = new ArrayList<>();
         SequentialFixpointRuleController seqCtrlNoDfs = new SequentialFixpointRuleController(false);
         SequentialFixpointRuleController seqCtrlFullDfs = new SequentialFixpointRuleController(true);
@@ -51,17 +54,18 @@
         defaultLogicalRewrites.add(new Pair<>(seqOnceCtrl, RuleCollections.buildTypeInferenceRuleCollection()));
         defaultLogicalRewrites.add(new Pair<>(seqOnceCtrl, RuleCollections.buildAutogenerateIDRuleCollection()));
         defaultLogicalRewrites
-            .add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildNormalizationRuleCollection()));
+                .add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildNormalizationRuleCollection(appCtx)));
         defaultLogicalRewrites
                 .add(new Pair<>(seqCtrlNoDfs, RuleCollections.buildCondPushDownAndJoinInferenceRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildLoadFieldsRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildLoadFieldsRuleCollection(appCtx)));
         // fj
         defaultLogicalRewrites.add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildFuzzyJoinRuleCollection()));
         //
-        defaultLogicalRewrites.add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildNormalizationRuleCollection()));
+        defaultLogicalRewrites
+                .add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildNormalizationRuleCollection(appCtx)));
         defaultLogicalRewrites
                 .add(new Pair<>(seqCtrlNoDfs, RuleCollections.buildCondPushDownAndJoinInferenceRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildLoadFieldsRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildLoadFieldsRuleCollection(appCtx)));
         defaultLogicalRewrites.add(new Pair<>(seqOnceCtrl, RuleCollections.buildDataExchangeRuleCollection()));
         defaultLogicalRewrites.add(new Pair<>(seqCtrlNoDfs, RuleCollections.buildConsolidationRuleCollection()));
         defaultLogicalRewrites.add(new Pair<>(seqOnceCtrl, RuleCollections.buildFulltextContainsRuleCollection()));
@@ -72,14 +76,15 @@
         return defaultLogicalRewrites;
     }
 
-    public static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildPhysical() {
+    public static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildPhysical(
+            ICcApplicationContext appCtx) {
         List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultPhysicalRewrites = new ArrayList<>();
         SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
         SequentialOnceRuleController seqOnceTopLevel = new SequentialOnceRuleController(false);
         defaultPhysicalRewrites
                 .add(new Pair<>(seqOnceCtrl, RuleCollections.buildPhysicalRewritesAllLevelsRuleCollection()));
         defaultPhysicalRewrites
-                .add(new Pair<>(seqOnceTopLevel, RuleCollections.buildPhysicalRewritesTopLevelRuleCollection()));
+                .add(new Pair<>(seqOnceTopLevel, RuleCollections.buildPhysicalRewritesTopLevelRuleCollection(appCtx)));
         defaultPhysicalRewrites.add(new Pair<>(seqOnceCtrl, RuleCollections.prepareForJobGenRuleCollection()));
         return defaultPhysicalRewrites;
     }
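
Rule-set construction now takes the CC application context so that context-dependent rules (notably ConstantFoldingRule, below) can be instantiated per compilation instead of relying on a global. A usage sketch, assuming appCtx is obtained from the metadata provider as the APIFramework hunk later in this patch does:

```java
IRuleSetFactory ruleSetFactory = new DefaultRuleSetFactory();
ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
builder.setLogicalRewrites(ruleSetFactory.getLogicalRewrites(appCtx));
builder.setPhysicalRewrites(ruleSetFactory.getPhysicalRewrites(appCtx));
```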
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/IRuleSetFactory.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/IRuleSetFactory.java
index af07355..dabac3d 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/IRuleSetFactory.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/IRuleSetFactory.java
@@ -20,6 +20,7 @@
 
 import java.util.List;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.algebricks.core.rewriter.base.AbstractRuleController;
@@ -31,13 +32,15 @@
      * @return the logical rewrites
      * @throws AlgebricksException
      */
-    public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getLogicalRewrites()
+    public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getLogicalRewrites(
+            ICcApplicationContext appCtx)
             throws AlgebricksException;
 
     /**
      * @return the physical rewrites
      */
-    public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getPhysicalRewrites()
+    public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getPhysicalRewrites(
+            ICcApplicationContext appCtx)
             throws AlgebricksException;
 
 }
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/jobgen/QueryLogicalExpressionJobGen.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/jobgen/QueryLogicalExpressionJobGen.java
index 54bb0bd..9f88bb6 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/jobgen/QueryLogicalExpressionJobGen.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/jobgen/QueryLogicalExpressionJobGen.java
@@ -20,6 +20,7 @@
 
 import java.util.List;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.functions.FunctionDescriptorTag;
 import org.apache.asterix.external.library.ExternalFunctionDescriptorProvider;
 import org.apache.asterix.formats.base.IDataFormat;
@@ -136,8 +137,9 @@
             IDataFormat format = FormatUtils.getDefaultFormat();
             fd = format.resolveFunction(expr, env);
         } else {
+            ICcApplicationContext appCtx = (ICcApplicationContext) context.getAppContext();
             fd = ExternalFunctionDescriptorProvider
-                    .getExternalFunctionDescriptor((IExternalFunctionInfo) expr.getFunctionInfo());
+                    .getExternalFunctionDescriptor((IExternalFunctionInfo) expr.getFunctionInfo(), appCtx);
         }
         return fd.createEvaluatorFactory(args);
     }
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/RuleCollections.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/RuleCollections.java
index ec73185..d94a045 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/RuleCollections.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/RuleCollections.java
@@ -23,6 +23,7 @@
 import java.util.LinkedList;
 import java.util.List;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.optimizer.rules.AddEquivalenceClassForRecordConstructorRule;
 import org.apache.asterix.optimizer.rules.AsterixExtractFunctionsFromJoinConditionRule;
 import org.apache.asterix.optimizer.rules.AsterixInlineVariablesRule;
@@ -161,7 +162,7 @@
         return Collections.singletonList(new FullTextContainsParameterCheckRule());
     }
 
-    public static final List<IAlgebraicRewriteRule> buildNormalizationRuleCollection() {
+    public static final List<IAlgebraicRewriteRule> buildNormalizationRuleCollection(ICcApplicationContext appCtx) {
         List<IAlgebraicRewriteRule> normalization = new LinkedList<>();
         normalization.add(new ResolveVariableRule());
         normalization.add(new CheckInsertUpsertReturningRule());
@@ -186,7 +187,7 @@
         // so that PushAggFunc can happen in fewer places.
         normalization.add(new PushAggFuncIntoStandaloneAggregateRule());
         normalization.add(new ListifyUnnestingFunctionRule());
-        normalization.add(new ConstantFoldingRule());
+        normalization.add(new ConstantFoldingRule(appCtx));
         normalization.add(new RemoveRedundantSelectRule());
         normalization.add(new UnnestToDataScanRule());
         normalization.add(new MetaFunctionToMetaVariableRule());
@@ -236,7 +237,7 @@
         return condPushDownAndJoinInference;
     }
 
-    public static final List<IAlgebraicRewriteRule> buildLoadFieldsRuleCollection() {
+    public static final List<IAlgebraicRewriteRule> buildLoadFieldsRuleCollection(ICcApplicationContext appCtx) {
         List<IAlgebraicRewriteRule> fieldLoads = new LinkedList<>();
         fieldLoads.add(new LoadRecordFieldsRule());
         fieldLoads.add(new PushFieldAccessRule());
@@ -245,7 +246,7 @@
         fieldLoads.add(new RemoveRedundantVariablesRule());
         fieldLoads.add(new AsterixInlineVariablesRule());
         fieldLoads.add(new RemoveUnusedAssignAndAggregateRule());
-        fieldLoads.add(new ConstantFoldingRule());
+        fieldLoads.add(new ConstantFoldingRule(appCtx));
         fieldLoads.add(new RemoveRedundantSelectRule());
         fieldLoads.add(new FeedScanCollectionToUnnest());
         fieldLoads.add(new NestedSubplanToJoinRule());
@@ -340,13 +341,14 @@
         return physicalRewritesAllLevels;
     }
 
-    public static final List<IAlgebraicRewriteRule> buildPhysicalRewritesTopLevelRuleCollection() {
+    public static final List<IAlgebraicRewriteRule> buildPhysicalRewritesTopLevelRuleCollection(
+            ICcApplicationContext appCtx) {
         List<IAlgebraicRewriteRule> physicalRewritesTopLevel = new LinkedList<>();
         physicalRewritesTopLevel.add(new PushNestedOrderByUnderPreSortedGroupByRule());
         physicalRewritesTopLevel.add(new CopyLimitDownRule());
        // CopyLimitDownRule may generate non-topmost limits with numeric_adds functions.
         // We are going to apply a constant folding rule again for this case.
-        physicalRewritesTopLevel.add(new ConstantFoldingRule());
+        physicalRewritesTopLevel.add(new ConstantFoldingRule(appCtx));
         physicalRewritesTopLevel.add(new PushLimitIntoOrderByRule());
         physicalRewritesTopLevel.add(new IntroduceProjectsRule());
         physicalRewritesTopLevel.add(new SetAlgebricksPhysicalOperatorsRule());
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
index 98e9f9d..5ef41c4 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
@@ -24,6 +24,7 @@
 import java.util.List;
 
 import org.apache.asterix.common.config.GlobalConfig;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.dataflow.data.common.ExpressionTypeComputer;
 import org.apache.asterix.dataflow.data.nontagged.MissingWriterFactory;
 import org.apache.asterix.formats.nontagged.ADMPrinterFactoryProvider;
@@ -55,8 +56,8 @@
 import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
 import org.apache.hyracks.algebricks.core.algebra.expressions.ExpressionRuntimeProvider;
+import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
 import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
@@ -80,16 +81,16 @@
 public class ConstantFoldingRule implements IAlgebraicRewriteRule {
 
     private final ConstantFoldingVisitor cfv = new ConstantFoldingVisitor();
+    private final JobGenContext jobGenCtx;
 
     // Function identifiers to which the constant folding rule should not be applied.
     // Most of them are record-related functions.
     private static final ImmutableSet<FunctionIdentifier> FUNC_ID_SET_THAT_SHOULD_NOT_BE_APPLIED =
-            ImmutableSet.of(BuiltinFunctions.RECORD_MERGE, BuiltinFunctions.ADD_FIELDS,
-                    BuiltinFunctions.REMOVE_FIELDS, BuiltinFunctions.GET_RECORD_FIELDS,
-                    BuiltinFunctions.GET_RECORD_FIELD_VALUE, BuiltinFunctions.FIELD_ACCESS_NESTED,
-                    BuiltinFunctions.GET_ITEM, BuiltinFunctions.OPEN_RECORD_CONSTRUCTOR,
-                    BuiltinFunctions.FIELD_ACCESS_BY_INDEX, BuiltinFunctions.CAST_TYPE,
-                    BuiltinFunctions.META, BuiltinFunctions.META_KEY);
+            ImmutableSet.of(BuiltinFunctions.RECORD_MERGE, BuiltinFunctions.ADD_FIELDS, BuiltinFunctions.REMOVE_FIELDS,
+                    BuiltinFunctions.GET_RECORD_FIELDS, BuiltinFunctions.GET_RECORD_FIELD_VALUE,
+                    BuiltinFunctions.FIELD_ACCESS_NESTED, BuiltinFunctions.GET_ITEM,
+                    BuiltinFunctions.OPEN_RECORD_CONSTRUCTOR, BuiltinFunctions.FIELD_ACCESS_BY_INDEX,
+                    BuiltinFunctions.CAST_TYPE, BuiltinFunctions.META, BuiltinFunctions.META_KEY);
 
     /** Throws exceptions in substituteProducedVariable, setVarType, and one getVarType method. */
     private static final IVariableTypeEnvironment _emptyTypeEnv = new IVariableTypeEnvironment() {
@@ -121,16 +122,17 @@
         }
     };
 
-    private static final JobGenContext _jobGenCtx = new JobGenContext(null, null, null,
-            SerializerDeserializerProvider.INSTANCE, BinaryHashFunctionFactoryProvider.INSTANCE,
-            BinaryHashFunctionFamilyProvider.INSTANCE, BinaryComparatorFactoryProvider.INSTANCE,
-            TypeTraitProvider.INSTANCE, BinaryBooleanInspector.FACTORY, BinaryIntegerInspector.FACTORY,
-            ADMPrinterFactoryProvider.INSTANCE, MissingWriterFactory.INSTANCE, null,
-            new ExpressionRuntimeProvider(QueryLogicalExpressionJobGen.INSTANCE),
-            ExpressionTypeComputer.INSTANCE, null, null, null, null, GlobalConfig.DEFAULT_FRAME_SIZE, null);
-
     private static final IOperatorSchema[] _emptySchemas = new IOperatorSchema[] {};
 
+    public ConstantFoldingRule(ICcApplicationContext appCtx) {
+        jobGenCtx = new JobGenContext(null, null, appCtx, SerializerDeserializerProvider.INSTANCE,
+                BinaryHashFunctionFactoryProvider.INSTANCE, BinaryHashFunctionFamilyProvider.INSTANCE,
+                BinaryComparatorFactoryProvider.INSTANCE, TypeTraitProvider.INSTANCE, BinaryBooleanInspector.FACTORY,
+                BinaryIntegerInspector.FACTORY, ADMPrinterFactoryProvider.INSTANCE, MissingWriterFactory.INSTANCE, null,
+                new ExpressionRuntimeProvider(QueryLogicalExpressionJobGen.INSTANCE), ExpressionTypeComputer.INSTANCE,
+                null, null, null, null, GlobalConfig.DEFAULT_FRAME_SIZE, null);
+    }
+
     @Override
     public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
             throws AlgebricksException {
@@ -168,13 +170,13 @@
         @Override
         public Pair<Boolean, ILogicalExpression> visitConstantExpression(ConstantExpression expr, Void arg)
                 throws AlgebricksException {
-            return new Pair<Boolean, ILogicalExpression>(false, expr);
+            return new Pair<>(false, expr);
         }
 
         @Override
         public Pair<Boolean, ILogicalExpression> visitVariableReferenceExpression(VariableReferenceExpression expr,
                 Void arg) throws AlgebricksException {
-            return new Pair<Boolean, ILogicalExpression>(false, expr);
+            return new Pair<>(false, expr);
         }
 
         @Override
@@ -182,12 +184,12 @@
                 Void arg) throws AlgebricksException {
             boolean changed = changeRec(expr, arg);
             if (!checkArgs(expr) || !expr.isFunctional()) {
-                return new Pair<Boolean, ILogicalExpression>(changed, expr);
+                return new Pair<>(changed, expr);
             }
 
             // Skip Constant Folding for the record-related functions.
             if (FUNC_ID_SET_THAT_SHOULD_NOT_BE_APPLIED.contains(expr.getFunctionIdentifier())) {
-                return new Pair<Boolean, ILogicalExpression>(false, null);
+                return new Pair<>(false, null);
             }
 
             // The current list SerDe assumes a strongly typed list, so we do not constant fold list constructors that are not strongly typed.
@@ -199,7 +201,7 @@
                     // case 1: listType == null, could be a nested list inside a list<ANY>
                     // case 2: itemType = ANY
                     // case 3: itemType = a nested list
-                    return new Pair<Boolean, ILogicalExpression>(false, null);
+                    return new Pair<>(false, null);
                 }
             }
             if (expr.getFunctionIdentifier().equals(BuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
@@ -208,20 +210,20 @@
                 int k = rt.getFieldIndex(str);
                 if (k >= 0) {
                     // wait for the ByNameToByIndex rule to apply
-                    return new Pair<Boolean, ILogicalExpression>(changed, expr);
+                    return new Pair<>(changed, expr);
                 }
             }
 
-            IScalarEvaluatorFactory fact = _jobGenCtx.getExpressionRuntimeProvider().createEvaluatorFactory(expr,
-                    _emptyTypeEnv, _emptySchemas, _jobGenCtx);
+            IScalarEvaluatorFactory fact = jobGenCtx.getExpressionRuntimeProvider().createEvaluatorFactory(expr,
+                    _emptyTypeEnv, _emptySchemas, jobGenCtx);
             try {
                 IScalarEvaluator eval = fact.createScalarEvaluator(null);
                 eval.evaluate(null, p);
                 Object t = _emptyTypeEnv.getType(expr);
 
                 @SuppressWarnings("rawtypes")
-                ISerializerDeserializer serde = _jobGenCtx.getSerializerDeserializerProvider()
-                        .getSerializerDeserializer(t);
+                ISerializerDeserializer serde =
+                        jobGenCtx.getSerializerDeserializerProvider().getSerializerDeserializer(t);
                 bbis.setByteBuffer(ByteBuffer.wrap(p.getByteArray(), p.getStartOffset(), p.getLength()), 0);
                 IAObject o = (IAObject) serde.deserialize(dis);
                 return new Pair<>(true, new ConstantExpression(new AsterixConstantValue(o)));
@@ -234,21 +236,21 @@
         public Pair<Boolean, ILogicalExpression> visitAggregateFunctionCallExpression(
                 AggregateFunctionCallExpression expr, Void arg) throws AlgebricksException {
             boolean changed = changeRec(expr, arg);
-            return new Pair<Boolean, ILogicalExpression>(changed, expr);
+            return new Pair<>(changed, expr);
         }
 
         @Override
         public Pair<Boolean, ILogicalExpression> visitStatefulFunctionCallExpression(
                 StatefulFunctionCallExpression expr, Void arg) throws AlgebricksException {
             boolean changed = changeRec(expr, arg);
-            return new Pair<Boolean, ILogicalExpression>(changed, expr);
+            return new Pair<>(changed, expr);
         }
 
         @Override
         public Pair<Boolean, ILogicalExpression> visitUnnestingFunctionCallExpression(
                 UnnestingFunctionCallExpression expr, Void arg) throws AlgebricksException {
             boolean changed = changeRec(expr, arg);
-            return new Pair<Boolean, ILogicalExpression>(changed, expr);
+            return new Pair<>(changed, expr);
         }
 
         private boolean changeRec(AbstractFunctionCallExpression expr, Void arg) throws AlgebricksException {
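
The rule previously shared a static _jobGenCtx whose application-context slot was null; each instance now builds its own JobGenContext with the supplied appCtx as the third constructor argument. At the call sites in RuleCollections this reduces to the following (a sketch; normalization and appCtx are the names used in that file):

```java
// Before: stateless, context-less construction.
// normalization.add(new ConstantFoldingRule());
// After: the compiling query's CC context is threaded in.
normalization.add(new ConstantFoldingRule(appCtx));
```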
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
index 5c13467..a24c6e0 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
@@ -99,7 +99,7 @@
         }
 
         InsertDeleteUpsertOperator insertOp = (InsertDeleteUpsertOperator) currentOp;
-        if (insertOp.getOperation() != Kind.INSERT) {
+        if (insertOp.getOperation() != Kind.INSERT && insertOp.getOperation() != Kind.UPSERT) {
             return false;
         }
 
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
index 3cdeb6f..e2589da 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
@@ -25,6 +25,7 @@
 import java.util.logging.Logger;
 
 import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.lang.common.statement.DatasetDecl;
@@ -36,7 +37,6 @@
 import org.apache.asterix.metadata.dataset.hints.DatasetHints;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.utils.MetadataConstants;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -49,11 +49,12 @@
 
     private static final Logger LOGGER = Logger.getLogger(AbstractLangTranslator.class.getName());
 
-    public void validateOperation(Dataverse defaultDataverse, Statement stmt) throws AsterixException {
+    public void validateOperation(ICcApplicationContext appCtx, Dataverse defaultDataverse, Statement stmt)
+            throws AsterixException {
 
         if (!(ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE)
                 && ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted())) {
-            int maxWaitCycles = AppContextInfo.INSTANCE.getExternalProperties().getMaxWaitClusterActive();
+            int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
             try {
                 ClusterStateManager.INSTANCE.waitForState(ClusterState.ACTIVE, maxWaitCycles, TimeUnit.SECONDS);
             } catch (HyracksDataException e) {
@@ -80,7 +81,7 @@
         }
 
         if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
-            int maxWaitCycles = AppContextInfo.INSTANCE.getExternalProperties().getMaxWaitClusterActive();
+            int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
             int waitCycleCount = 0;
             try {
                 while (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
@@ -162,7 +163,7 @@
                     Pair<Boolean, String> validationResult = null;
                     StringBuffer errorMsgBuffer = new StringBuffer();
                     for (Entry<String, String> hint : hints.entrySet()) {
-                        validationResult = DatasetHints.validate(hint.getKey(), hint.getValue());
+                        validationResult = DatasetHints.validate(appCtx, hint.getKey(), hint.getValue());
                         if (!validationResult.first) {
                             errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue()
                                     + " error in processing hint: " + hint.getKey() + " " + validationResult.second);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/IStatementExecutorFactory.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/IStatementExecutorFactory.java
index cf54921..23365de 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/IStatementExecutorFactory.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/IStatementExecutorFactory.java
@@ -21,6 +21,7 @@
 import java.util.List;
 
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.lang.common.base.Statement;
 
@@ -44,6 +45,6 @@
      *            provides storage-related components
      * @return an implementation of {@code IStatementExecutor} that is used to execute the passed list of statements
      */
-    IStatementExecutor create(List<Statement> statements, SessionConfig conf,
+    IStatementExecutor create(ICcApplicationContext appCtx, List<Statement> statements, SessionConfig conf,
             ILangCompilationProvider compilationProvider, IStorageComponentProvider storageComponentProvider);
 }
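
The factory gains the application context as its first parameter; the ApiServlet hunk later in this patch shows the real call. A minimal sketch with illustrative variable names:

```java
IStatementExecutor translator = statementExecutorFactory.create(appCtx, statements,
        sessionConfig, compilationProvider, storageComponentProvider);
```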
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index c4cc486..4b4ef8e 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -34,6 +34,7 @@
 import org.apache.asterix.algebra.operators.CommitOperator;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.MetadataProperties;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.functions.FunctionConstants;
 import org.apache.asterix.common.functions.FunctionSignature;
@@ -94,7 +95,6 @@
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.runtime.formats.FormatUtils;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.translator.CompiledStatements.CompiledInsertStatement;
 import org.apache.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
 import org.apache.asterix.translator.CompiledStatements.CompiledSubscribeFeedStatement;
@@ -316,7 +316,7 @@
         if (outputDatasetName == null) {
             FileSplit outputFileSplit = metadataProvider.getOutputFile();
             if (outputFileSplit == null) {
-                outputFileSplit = getDefaultOutputFileLocation();
+                outputFileSplit = getDefaultOutputFileLocation(metadataProvider.getApplicationContext());
             }
             metadataProvider.setOutputFile(outputFileSplit);
 
@@ -606,8 +606,8 @@
                 }
             }
             // A change feed, we don't need the assign to access PKs
-            upsertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef, varRefsForLoading,
-                    metaExpSingletonList, InsertDeleteUpsertOperator.Kind.UPSERT, false);
+            upsertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef, varRefsForLoading, metaExpSingletonList,
+                    InsertDeleteUpsertOperator.Kind.UPSERT, false);
             // Create and add a new variable used for representing the original record
             upsertOp.setPrevRecordVar(context.newVar());
             upsertOp.setPrevRecordType(targetDatasource.getItemType());
@@ -730,11 +730,11 @@
                 dataset.getDatasetDetails(), domain);
     }
 
-    private FileSplit getDefaultOutputFileLocation() throws MetadataException {
+    private FileSplit getDefaultOutputFileLocation(ICcApplicationContext appCtx) throws MetadataException {
         String outputDir = System.getProperty("java.io.tmpDir");
         String filePath =
                 outputDir + System.getProperty("file.separator") + OUTPUT_FILE_PREFIX + outputFileID.incrementAndGet();
-        MetadataProperties metadataProperties = AppContextInfo.INSTANCE.getMetadataProperties();
+        MetadataProperties metadataProperties = appCtx.getMetadataProperties();
         return new ManagedFileSplit(metadataProperties.getMetadataNodeName(), filePath);
     }
 
@@ -1005,8 +1005,8 @@
         arguments.add(new MutableObject<>(ConstantExpression.TRUE));
         arguments.add(new MutableObject<>(new VariableReferenceExpression(opAndVarForThen.second)));
         arguments.add(new MutableObject<>(new VariableReferenceExpression(opAndVarForElse.second)));
-        AbstractFunctionCallExpression swithCaseExpr = new ScalarFunctionCallExpression(
-                FunctionUtil.getFunctionInfo(BuiltinFunctions.SWITCH_CASE), arguments);
+        AbstractFunctionCallExpression swithCaseExpr =
+                new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.SWITCH_CASE), arguments);
         AssignOperator assignOp = new AssignOperator(selectVar, new MutableObject<>(swithCaseExpr));
         assignOp.getInputs().add(new MutableObject<>(opAndVarForElse.first));
 
@@ -1176,8 +1176,7 @@
         firstOp.getInputs().add(topOp);
         topOp = lastOp;
 
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 =
-                langExprToAlgExpression(qe.getSatisfiesExpr(), topOp);
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = langExprToAlgExpression(qe.getSatisfiesExpr(), topOp);
 
         AggregateFunctionCallExpression fAgg;
         SelectOperator s;
@@ -1219,8 +1218,7 @@
             Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo1 = langExprToAlgExpression(fb.getLeftExpr(), topOp);
             f.getArguments().add(new MutableObject<>(eo1.first));
             topOp = eo1.second;
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 =
-                    langExprToAlgExpression(fb.getRightExpr(), topOp);
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = langExprToAlgExpression(fb.getRightExpr(), topOp);
             f.getArguments().add(new MutableObject<>(eo2.first));
             topOp = eo2.second;
         }
@@ -1699,9 +1697,8 @@
         Mutable<ILogicalExpression> expr = new MutableObject<>(
                 new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.IS_UNKOWN),
                         new ArrayList<>(Collections.singletonList(new MutableObject<>(logicalExpr)))));
-        arguments.add(new MutableObject<>(
-                new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.NOT),
-                        new ArrayList<>(Collections.singletonList(expr)))));
+        arguments.add(new MutableObject<>(new ScalarFunctionCallExpression(
+                FunctionUtil.getFunctionInfo(BuiltinFunctions.NOT), new ArrayList<>(Collections.singletonList(expr)))));
         return new MutableObject<>(
                 new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.AND), arguments));
     }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
index f0e8588..b12935d 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
@@ -63,7 +63,6 @@
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.optimizer.base.FuzzyUtils;
 import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.transaction.management.service.transaction.JobIdFactory;
 import org.apache.asterix.translator.CompiledStatements.ICompiledDmlStatement;
 import org.apache.asterix.translator.IStatementExecutor.Stats;
@@ -113,13 +112,13 @@
     private static final int MIN_FRAME_LIMIT_FOR_JOIN = 5;
 
     // A white list of supported configurable parameters.
-    private static final Set<String> CONFIGURABLE_PARAMETER_NAMES = ImmutableSet.of(
-            CompilerProperties.COMPILER_JOINMEMORY_KEY, CompilerProperties.COMPILER_GROUPMEMORY_KEY,
-            CompilerProperties.COMPILER_SORTMEMORY_KEY, CompilerProperties.COMPILER_PARALLELISM_KEY,
-            FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, FuzzyUtils.SIM_FUNCTION_PROP_NAME,
-            FuzzyUtils.SIM_THRESHOLD_PROP_NAME, SubscribeFeedStatement.WAIT_FOR_COMPLETION,
-            FeedActivityDetails.FEED_POLICY_NAME, FeedActivityDetails.COLLECT_LOCATIONS, "inline_with", "hash_merge",
-            "output-record-type");
+    private static final Set<String> CONFIGURABLE_PARAMETER_NAMES =
+            ImmutableSet.of(CompilerProperties.COMPILER_JOINMEMORY_KEY, CompilerProperties.COMPILER_GROUPMEMORY_KEY,
+                    CompilerProperties.COMPILER_SORTMEMORY_KEY, CompilerProperties.COMPILER_PARALLELISM_KEY,
+                    FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, FuzzyUtils.SIM_FUNCTION_PROP_NAME,
+                    FuzzyUtils.SIM_THRESHOLD_PROP_NAME, SubscribeFeedStatement.WAIT_FOR_COMPLETION,
+                    FeedActivityDetails.FEED_POLICY_NAME, FeedActivityDetails.COLLECT_LOCATIONS, "inline_with",
+                    "hash_merge", "output-record-type");
 
     private final IRewriterFactory rewriterFactory;
     private final IAstPrintVisitorFactory astPrintVisitorFactory;
@@ -145,8 +144,7 @@
                 IExpressionEvalSizeComputer expressionEvalSizeComputer,
                 IMergeAggregationExpressionFactory mergeAggregationExpressionFactory,
                 IExpressionTypeComputer expressionTypeComputer, IMissableTypeComputer missableTypeComputer,
-                IConflictingTypeResolver conflictingTypeResolver,
-                PhysicalOptimizationConfig physicalOptimizationConfig,
+                IConflictingTypeResolver conflictingTypeResolver, PhysicalOptimizationConfig physicalOptimizationConfig,
                 AlgebricksPartitionConstraint clusterLocations) {
             return new AlgebricksOptimizationContext(varCounter, expressionEvalSizeComputer,
                     mergeAggregationExpressionFactory, expressionTypeComputer, missableTypeComputer,
@@ -222,7 +220,7 @@
             }
             printPlanPostfix(conf);
         }
-        CompilerProperties compilerProperties = AppContextInfo.INSTANCE.getCompilerProperties();
+        CompilerProperties compilerProperties = metadataProvider.getApplicationContext().getCompilerProperties();
         int frameSize = compilerProperties.getFrameSize();
         Map<String, String> querySpecificConfig = metadataProvider.getConfig();
         validateConfig(querySpecificConfig); // Validates the user-overridden query parameters.
@@ -243,8 +241,8 @@
         HeuristicCompilerFactoryBuilder builder =
                 new HeuristicCompilerFactoryBuilder(OptimizationContextFactory.INSTANCE);
         builder.setPhysicalOptimizationConfig(OptimizationConfUtil.getPhysicalOptimizationConfig());
-        builder.setLogicalRewrites(ruleSetFactory.getLogicalRewrites());
-        builder.setPhysicalRewrites(ruleSetFactory.getPhysicalRewrites());
+        builder.setLogicalRewrites(ruleSetFactory.getLogicalRewrites(metadataProvider.getApplicationContext()));
+        builder.setPhysicalRewrites(ruleSetFactory.getPhysicalRewrites(metadataProvider.getApplicationContext()));
         IDataFormat format = metadataProvider.getFormat();
         ICompilerFactory compilerFactory = builder.create();
         builder.setExpressionEvalSizeComputer(format.getExpressionEvalSizeComputer());
@@ -282,7 +280,8 @@
             try {
                 LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();
                 PlanPrettyPrinter.printPlan(plan, pvisitor, 0);
-                ResultUtil.printResults(pvisitor.get().toString(), conf, new Stats(), null);
+                ResultUtil.printResults(metadataProvider.getApplicationContext(), pvisitor.get().toString(), conf,
+                        new Stats(), null);
                 return null;
             } catch (IOException e) {
                 throw new AlgebricksException(e);
@@ -296,8 +295,7 @@
         builder.setBinaryBooleanInspectorFactory(format.getBinaryBooleanInspectorFactory());
         builder.setBinaryIntegerInspectorFactory(format.getBinaryIntegerInspectorFactory());
         builder.setComparatorFactoryProvider(format.getBinaryComparatorFactoryProvider());
-        builder.setExpressionRuntimeProvider(
-                new ExpressionRuntimeProvider(QueryLogicalExpressionJobGen.INSTANCE));
+        builder.setExpressionRuntimeProvider(new ExpressionRuntimeProvider(QueryLogicalExpressionJobGen.INSTANCE));
         builder.setHashFunctionFactoryProvider(format.getBinaryHashFunctionFactoryProvider());
         builder.setHashFunctionFamilyProvider(format.getBinaryHashFunctionFamilyProvider());
         builder.setMissingWriterFactory(format.getMissingWriterFactory());
@@ -327,7 +325,7 @@
 
         JobEventListenerFactory jobEventListenerFactory =
                 new JobEventListenerFactory(asterixJobId, metadataProvider.isWriteTransaction());
-        JobSpecification spec = compiler.createJob(AppContextInfo.INSTANCE, jobEventListenerFactory);
+        JobSpecification spec = compiler.createJob(metadataProvider.getApplicationContext(), jobEventListenerFactory);
 
         // When the top-level statement is a query, the statement parameter is null.
         if (statement == null) {
@@ -465,8 +463,7 @@
             int minFrameLimit)
             throws AlgebricksException {
         IOptionType<Long> longBytePropertyInterpreter = OptionTypes.LONG_BYTE_UNIT;
-        long memBudget =
-                parameter == null ? memBudgetInConfiguration : longBytePropertyInterpreter.parse(parameter);
+        long memBudget = parameter == null ? memBudgetInConfiguration : longBytePropertyInterpreter.parse(parameter);
         int frameLimit = (int) (memBudget / frameSize);
         if (frameLimit < minFrameLimit) {
             throw AsterixException.create(ErrorCode.COMPILATION_BAD_QUERY_PARAMETER_VALUE, parameterName,
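
The helper reformatted in this final hunk converts a per-operator memory budget into a frame count and rejects budgets below the operator's minimum (MIN_FRAME_LIMIT_FOR_JOIN is 5 in this file). A self-contained worked example of that arithmetic, with illustrative numbers:

```java
public class FrameLimitExample {
    public static void main(String[] args) {
        long memBudget = 32L * 1024 * 1024; // e.g. compiler.joinmemory = 32 MB
        int frameSize = 32 * 1024;          // e.g. compiler.framesize = 32 KB
        int minFrameLimit = 5;              // MIN_FRAME_LIMIT_FOR_JOIN above
        int frameLimit = (int) (memBudget / frameSize); // 1024 frames
        if (frameLimit < minFrameLimit) {
            throw new IllegalArgumentException("memory budget too small for a join");
        }
        System.out.println(frameLimit);
    }
}
```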
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AppRuntimeContextProviderForRecovery.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AppRuntimeContextProviderForRecovery.java
index d8a2fab..6a3472d 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AppRuntimeContextProviderForRecovery.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AppRuntimeContextProviderForRecovery.java
@@ -20,7 +20,7 @@
 
 import org.apache.asterix.app.nc.NCAppRuntimeContext;
 import org.apache.asterix.common.api.ThreadExecutor;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.asterix.common.transactions.IAppRuntimeContextProvider;
 import org.apache.asterix.common.transactions.ITransactionSubsystem;
@@ -85,7 +85,7 @@
     }
 
     @Override
-    public IAppRuntimeContext getAppContext() {
+    public INcApplicationContext getAppContext() {
         return asterixAppRuntimeContext;
     }
 
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/AbstractQueryApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/AbstractQueryApiServlet.java
index f79a2da..a9c4b39 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/AbstractQueryApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/AbstractQueryApiServlet.java
@@ -26,6 +26,7 @@
 import java.util.concurrent.ConcurrentMap;
 
 import org.apache.asterix.app.result.ResultReader;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
@@ -34,6 +35,7 @@
 import org.apache.hyracks.http.server.AbstractServlet;
 
 public class AbstractQueryApiServlet extends AbstractServlet {
+    protected final ICcApplicationContext appCtx;
 
     public enum ResultFields {
         REQUEST_ID("requestID"),
@@ -91,8 +93,9 @@
         }
     }
 
-    AbstractQueryApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
+    AbstractQueryApiServlet(ICcApplicationContext appCtx, ConcurrentMap<String, Object> ctx, String[] paths) {
         super(ctx, paths);
+        this.appCtx = appCtx;
     }
 
     protected IHyracksDataset getHyracksDataset() throws Exception { // NOSONAR
@@ -101,8 +104,8 @@
             if (hds == null) {
                 hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
                 if (hds == null) {
-                    hds = new HyracksDataset(getHyracksClientConnection(), ResultReader.FRAME_SIZE,
-                            ResultReader.NUM_READERS);
+                    hds = new HyracksDataset(getHyracksClientConnection(),
+                            appCtx.getCompilerProperties().getFrameSize(), ResultReader.NUM_READERS);
                     ctx.put(HYRACKS_DATASET_ATTR, hds);
                 }
             }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java
index e435da7..3384332 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java
@@ -38,6 +38,7 @@
 import org.apache.asterix.app.result.ResultReader;
 import org.apache.asterix.common.config.GlobalConfig;
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.lang.aql.parser.TokenMgrError;
@@ -67,15 +68,17 @@
     private static final Logger LOGGER = Logger.getLogger(ApiServlet.class.getName());
     public static final String HTML_STATEMENT_SEPARATOR = "<!-- BEGIN -->";
 
+    private final ICcApplicationContext appCtx;
     private final ILangCompilationProvider aqlCompilationProvider;
     private final ILangCompilationProvider sqlppCompilationProvider;
     private final IStatementExecutorFactory statementExectorFactory;
     private final IStorageComponentProvider componentProvider;
 
-    public ApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+    public ApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
             ILangCompilationProvider aqlCompilationProvider, ILangCompilationProvider sqlppCompilationProvider,
             IStatementExecutorFactory statementExecutorFactory, IStorageComponentProvider componentProvider) {
         super(ctx, paths);
+        this.appCtx = appCtx;
         this.aqlCompilationProvider = aqlCompilationProvider;
         this.sqlppCompilationProvider = sqlppCompilationProvider;
         this.statementExectorFactory = statementExecutorFactory;
@@ -126,7 +129,8 @@
                 synchronized (ctx) {
                     hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
                     if (hds == null) {
-                        hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
+                        hds = new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(),
+                                ResultReader.NUM_READERS);
                         ctx.put(HYRACKS_DATASET_ATTR, hds);
                     }
                 }
@@ -140,7 +144,7 @@
             sessionConfig.setOOBData(isSet(printExprParam), isSet(printRewrittenExprParam),
                     isSet(printLogicalPlanParam), isSet(printOptimizedLogicalPlanParam), isSet(printJob));
             MetadataManager.INSTANCE.init();
-            IStatementExecutor translator = statementExectorFactory.create(aqlStatements, sessionConfig,
+            IStatementExecutor translator = statementExectorFactory.create(appCtx, aqlStatements, sessionConfig,
                     compilationProvider, componentProvider);
             double duration;
             long startTime = System.currentTimeMillis();
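(Aside: the pattern running through all of these servlet hunks is the same refactoring: a static singleton lookup is replaced by an ICcApplicationContext passed through the constructor and handed to every collaborator, here the statement executor factory. A schematic sketch of that shape, with an illustrative AppContext interface standing in for the real one:

    // Illustrative stand-in for ICcApplicationContext.
    interface AppContext {
        int frameSize();
    }

    public class InjectedServlet {
        private final AppContext appCtx;

        // before: collaborators reached for a static INSTANCE; after: the
        // context is an explicit constructor dependency, replaceable in tests
        public InjectedServlet(AppContext appCtx) {
            this.appCtx = appCtx;
        }

        void handle() {
            System.out.println("configured frame size: " + appCtx.frameSize());
        }

        public static void main(String[] args) {
            new InjectedServlet(() -> 32768).handle();
        }
    }

End of aside.)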
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java
index 984eef2..82e8f7a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java
@@ -28,7 +28,7 @@
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
 
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
 import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.hyracks.api.config.IOption;
 import org.apache.hyracks.api.config.Section;
@@ -99,8 +99,8 @@
 
     protected ObjectNode getClusterStateJSON(IServletRequest request, String pathToNode) {
         ObjectNode json = ClusterStateManager.INSTANCE.getClusterStateDescription();
-        AppContextInfo appConfig = (AppContextInfo) ctx.get(ASTERIX_APP_CONTEXT_INFO_ATTR);
-        json.putPOJO("config", ConfigUtils.getSectionOptionsForJSON(appConfig.getCCServiceContext().getAppConfig(),
+        CcApplicationContext appConfig = (CcApplicationContext) ctx.get(ASTERIX_APP_CONTEXT_INFO_ATTR);
+        json.putPOJO("config", ConfigUtils.getSectionOptionsForJSON(appConfig.getServiceContext().getAppConfig(),
                 Section.COMMON, getConfigSelector()));
 
         ArrayNode ncs = (ArrayNode) json.get("ncs");
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
index 869289a..cb3c063 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
@@ -28,6 +28,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.file.StorageComponentProvider;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -58,9 +59,11 @@
  */
 public class ConnectorApiServlet extends AbstractServlet {
     private static final Logger LOGGER = Logger.getLogger(ConnectorApiServlet.class.getName());
+    private ICcApplicationContext appCtx;
 
-    public ConnectorApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
+    public ConnectorApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx) {
         super(ctx, paths);
+        this.appCtx = appCtx;
     }
 
     @Override
@@ -91,7 +94,7 @@
             MetadataManager.INSTANCE.init();
             MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
             // Retrieves file splits of the dataset.
-            MetadataProvider metadataProvider = new MetadataProvider(null, new StorageComponentProvider());
+            MetadataProvider metadataProvider = new MetadataProvider(appCtx, null, new StorageComponentProvider());
             try {
                 metadataProvider.setMetadataTxnContext(mdTxnCtx);
                 Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DdlApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DdlApiServlet.java
index a994470..4c1dbf7 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DdlApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DdlApiServlet.java
@@ -21,6 +21,7 @@
 import java.util.concurrent.ConcurrentMap;
 
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.translator.IStatementExecutorFactory;
@@ -30,10 +31,10 @@
     private static final byte ALLOWED_CATEGORIES =
             Statement.Category.QUERY | Statement.Category.UPDATE | Statement.Category.DDL;
 
-    public DdlApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+    public DdlApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
             ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
             IStorageComponentProvider componentProvider) {
-        super(ctx, paths, compilationProvider, statementExecutorFactory, componentProvider);
+        super(ctx, paths, appCtx, compilationProvider, statementExecutorFactory, componentProvider);
     }
 
     @Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
index 788927f..dcd0e70 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
@@ -35,7 +35,7 @@
 import java.util.logging.Logger;
 
 import org.apache.asterix.api.http.servlet.ServletConstants;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.http.api.IServletRequest;
 import org.apache.hyracks.http.api.IServletResponse;
@@ -49,9 +49,11 @@
 
 public class DiagnosticsApiServlet extends NodeControllerDetailsApiServlet {
     private static final Logger LOGGER = Logger.getLogger(DiagnosticsApiServlet.class.getName());
+    private final ICcApplicationContext appCtx;
 
-    public DiagnosticsApiServlet(ConcurrentMap<String, Object> ctx, String... paths) {
+    public DiagnosticsApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx) {
         super(ctx, paths);
+        this.appCtx = appCtx;
     }
 
     @Override
@@ -93,10 +95,9 @@
                 executor.submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getNodeDetailsJSON(null, true, false)))));
 
         Map<String, Map<String, Future<ObjectNode>>> ncDataMap = new HashMap<>();
-        for (String nc : AppContextInfo.INSTANCE.getMetadataProperties().getNodeNames()) {
+        for (String nc : appCtx.getMetadataProperties().getNodeNames()) {
             Map<String, Future<ObjectNode>> ncData = new HashMap<>();
-            ncData.put("threaddump",
-                    executor.submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getThreadDump(nc)))));
+            ncData.put("threaddump", executor.submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getThreadDump(nc)))));
             ncData.put("config", executor
                     .submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getNodeDetailsJSON(nc, false, true)))));
             ncData.put("stats", executor.submit(() -> fixupKeys(processNodeStats(hcc, nc))));
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/FullApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/FullApiServlet.java
index eafae35..0a461c7 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/FullApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/FullApiServlet.java
@@ -21,6 +21,7 @@
 import java.util.concurrent.ConcurrentMap;
 
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.translator.IStatementExecutorFactory;
@@ -32,10 +33,10 @@
     private static final byte ALLOWED_CATEGORIES = Statement.Category.QUERY | Statement.Category.UPDATE
             | Statement.Category.DDL | Statement.Category.PROCEDURE;
 
-    public FullApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+    public FullApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
             ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
             IStorageComponentProvider componentProvider) {
-        super(ctx, paths, compilationProvider, statementExecutorFactory, componentProvider);
+        super(ctx, paths, appCtx, compilationProvider, statementExecutorFactory, componentProvider);
     }
 
     @Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryApiServlet.java
index 5075795..3e692d3 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryApiServlet.java
@@ -21,6 +21,7 @@
 import java.util.concurrent.ConcurrentMap;
 
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.translator.IStatementExecutorFactory;
@@ -29,10 +30,10 @@
 public class QueryApiServlet extends RestApiServlet {
     private static final byte ALLOWED_CATEGORIES = Statement.Category.QUERY;
 
-    public QueryApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+    public QueryApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
             ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
             IStorageComponentProvider componentProvider) {
-        super(ctx, paths, compilationProvider, statementExecutorFactory, componentProvider);
+        super(ctx, paths, appCtx, compilationProvider, statementExecutorFactory, componentProvider);
     }
 
     @Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryResultApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryResultApiServlet.java
index 57492be..401f55e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryResultApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryResultApiServlet.java
@@ -25,6 +25,7 @@
 
 import org.apache.asterix.app.result.ResultHandle;
 import org.apache.asterix.app.result.ResultReader;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.translator.IStatementExecutor.Stats;
 import org.apache.asterix.translator.SessionConfig;
 import org.apache.hyracks.api.dataset.DatasetJobRecord;
@@ -40,8 +41,8 @@
 public class QueryResultApiServlet extends AbstractQueryApiServlet {
     private static final Logger LOGGER = Logger.getLogger(QueryResultApiServlet.class.getName());
 
-    public QueryResultApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
-        super(ctx, paths);
+    public QueryResultApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx) {
+        super(appCtx, ctx, paths);
     }
 
     @Override
@@ -94,7 +95,7 @@
             // originally determined there. Need to save this value on
             // some object that we can obtain here.
             SessionConfig sessionConfig = RestApiServlet.initResponse(request, response);
-            ResultUtil.printResults(resultReader, sessionConfig, new Stats(), null);
+            ResultUtil.printResults(appCtx, resultReader, sessionConfig, new Stats(), null);
         } catch (HyracksDataException e) {
             final int errorCode = e.getErrorCode();
             if (ErrorCode.NO_RESULTSET == errorCode) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java
index faf9968..20bffc4 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java
@@ -18,8 +18,6 @@
  */
 package org.apache.asterix.api.http.server;
 
-import static org.apache.asterix.translator.IStatementExecutor.ResultDelivery;
-
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.StringWriter;
@@ -34,6 +32,7 @@
 import org.apache.asterix.common.api.IClusterManagementWork;
 import org.apache.asterix.common.config.GlobalConfig;
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.lang.aql.parser.TokenMgrError;
@@ -42,6 +41,7 @@
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.asterix.translator.IStatementExecutor;
+import org.apache.asterix.translator.IStatementExecutor.ResultDelivery;
 import org.apache.asterix.translator.IStatementExecutor.Stats;
 import org.apache.asterix.translator.IStatementExecutorContext;
 import org.apache.asterix.translator.IStatementExecutorFactory;
@@ -71,10 +71,10 @@
     private final IStorageComponentProvider componentProvider;
     private final IStatementExecutorContext queryCtx = new StatementExecutorContext();
 
-    public QueryServiceServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+    public QueryServiceServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
             ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
             IStorageComponentProvider componentProvider) {
-        super(ctx, paths);
+        super(appCtx, ctx, paths);
         this.compilationProvider = compilationProvider;
         this.statementExecutorFactory = statementExecutorFactory;
         this.componentProvider = componentProvider;
@@ -179,7 +179,6 @@
         String clientContextID;
         String mode;
 
-
         @Override
         public String toString() {
             try {
@@ -236,8 +235,8 @@
         return SessionConfig.OutputFormat.CLEAN_JSON;
     }
 
-    private static SessionConfig createSessionConfig(RequestParameters param, String handleUrl, PrintWriter
-            resultWriter) {
+    private static SessionConfig createSessionConfig(RequestParameters param, String handleUrl,
+            PrintWriter resultWriter) {
         SessionConfig.ResultDecorator resultPrefix = new SessionConfig.ResultDecorator() {
             int resultNo = -1;
 
@@ -386,9 +385,12 @@
      * delivery mode. Usually there will be a "status" endpoint for ASYNC requests that exposes the status of the
      * execution and a "result" endpoint for DEFERRED requests that will deliver the result for a successful execution.
      *
-     * @param host hostname used for this request
-     * @param path servlet path for this request
-     * @param delivery ResultDelivery mode for this request
+     * @param host
+     *            hostname used for this request
+     * @param path
+     *            servlet path for this request
+     * @param delivery
+     *            ResultDelivery mode for this request
      * @return a handle (URL) that allows a client to access further information for this request
      */
     protected String getHandleUrl(String host, String path, ResultDelivery delivery) {
@@ -430,7 +432,8 @@
             List<Statement> statements = parser.parse();
             MetadataManager.INSTANCE.init();
             IStatementExecutor translator =
-                    statementExecutorFactory.create(statements, sessionConfig, compilationProvider, componentProvider);
+                    statementExecutorFactory.create(appCtx, statements, sessionConfig, compilationProvider,
+                            componentProvider);
             execStart = System.nanoTime();
             translator.compileAndExecute(getHyracksClientConnection(), getHyracksDataset(), delivery, stats,
                     param.clientContextID, queryCtx);
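(Aside: per the reformatted javadoc for getHandleUrl, ASYNC requests receive a handle to a status endpoint while DEFERRED requests receive a handle to a result endpoint. A sketch of how such a handle URL might be assembled from host, servlet path, and delivery mode; the exact path layout here is an assumption, not the servlet's actual routing:

    public class HandleUrlSketch {
        enum ResultDelivery { IMMEDIATE, DEFERRED, ASYNC }

        static String getHandleUrl(String host, String path, ResultDelivery delivery) {
            // ASYNC -> poll a status endpoint; DEFERRED -> fetch the result later
            String suffix = delivery == ResultDelivery.ASYNC ? "status" : "result";
            return "http://" + host + path + "/" + suffix + "/";
        }

        public static void main(String[] args) {
            System.out.println(getHandleUrl("localhost:19002", "/query/service", ResultDelivery.ASYNC));
            System.out.println(getHandleUrl("localhost:19002", "/query/service", ResultDelivery.DEFERRED));
        }
    }

End of aside.)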
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryStatusApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryStatusApiServlet.java
index 8fbb4c5..d0c574e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryStatusApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryStatusApiServlet.java
@@ -29,6 +29,7 @@
 
 import org.apache.asterix.app.result.ResultHandle;
 import org.apache.asterix.app.result.ResultReader;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.hyracks.api.dataset.DatasetJobRecord;
 import org.apache.hyracks.api.dataset.IHyracksDataset;
 import org.apache.hyracks.http.api.IServletRequest;
@@ -40,8 +41,8 @@
 public class QueryStatusApiServlet extends AbstractQueryApiServlet {
     private static final Logger LOGGER = Logger.getLogger(QueryStatusApiServlet.class.getName());
 
-    public QueryStatusApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
-        super(ctx, paths);
+    public QueryStatusApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx) {
+        super(appCtx, ctx, paths);
     }
 
     @Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryWebInterfaceServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryWebInterfaceServlet.java
index 94ce017..c798a7c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryWebInterfaceServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryWebInterfaceServlet.java
@@ -25,7 +25,7 @@
 import java.util.logging.Logger;
 
 import org.apache.asterix.common.config.ExternalProperties;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.hyracks.http.api.IServletRequest;
 import org.apache.hyracks.http.api.IServletResponse;
 import org.apache.hyracks.http.server.StaticResourceServlet;
@@ -38,9 +38,11 @@
 
 public class QueryWebInterfaceServlet extends StaticResourceServlet {
     private static final Logger LOGGER = Logger.getLogger(QueryWebInterfaceServlet.class.getName());
+    private ICcApplicationContext appCtx;
 
-    public QueryWebInterfaceServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
+    public QueryWebInterfaceServlet(ICcApplicationContext appCtx, ConcurrentMap<String, Object> ctx, String[] paths) {
         super(ctx, paths);
+        this.appCtx = appCtx;
     }
 
     @Override
@@ -57,7 +59,7 @@
     @Override
     protected void post(IServletRequest request, IServletResponse response) throws IOException {
         HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
-        ExternalProperties externalProperties = AppContextInfo.INSTANCE.getExternalProperties();
+        ExternalProperties externalProperties = appCtx.getExternalProperties();
         response.setStatus(HttpResponseStatus.OK);
         ObjectMapper om = new ObjectMapper();
         ObjectNode obj = om.createObjectNode();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java
index 74290f3..e339ba9 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java
@@ -32,6 +32,7 @@
 import org.apache.asterix.app.translator.QueryTranslator;
 import org.apache.asterix.common.config.GlobalConfig;
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.lang.aql.parser.TokenMgrError;
@@ -61,15 +62,17 @@
 
 public abstract class RestApiServlet extends AbstractServlet {
     private static final Logger LOGGER = Logger.getLogger(RestApiServlet.class.getName());
+    private final ICcApplicationContext appCtx;
     private final ILangCompilationProvider compilationProvider;
     private final IParserFactory parserFactory;
     private final IStatementExecutorFactory statementExecutorFactory;
     private final IStorageComponentProvider componentProvider;
 
-    public RestApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+    public RestApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
             ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
             IStorageComponentProvider componentProvider) {
         super(ctx, paths);
+        this.appCtx = appCtx;
         this.compilationProvider = compilationProvider;
         this.parserFactory = compilationProvider.getParserFactory();
         this.statementExecutorFactory = statementExecutorFactory;
@@ -188,7 +191,8 @@
                 synchronized (ctx) {
                     hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
                     if (hds == null) {
-                        hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
+                        hds = new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(),
+                                ResultReader.NUM_READERS);
                         ctx.put(HYRACKS_DATASET_ATTR, hds);
                     }
                 }
@@ -197,7 +201,7 @@
             List<Statement> aqlStatements = parser.parse();
             validate(aqlStatements);
             MetadataManager.INSTANCE.init();
-            IStatementExecutor translator = statementExecutorFactory.create(aqlStatements, sessionConfig,
+            IStatementExecutor translator = statementExecutorFactory.create(appCtx, aqlStatements, sessionConfig,
                     compilationProvider, componentProvider);
             translator.compileAndExecute(hcc, hds, resultDelivery, new IStatementExecutor.Stats());
         } catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java
index a257958..fe6fa89 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java
@@ -36,6 +36,7 @@
 import org.apache.asterix.app.result.ResultHandle;
 import org.apache.asterix.app.result.ResultPrinter;
 import org.apache.asterix.app.result.ResultReader;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.translator.IStatementExecutor.Stats;
 import org.apache.asterix.translator.SessionConfig;
@@ -76,14 +77,14 @@
         return escaped;
     }
 
-    public static void printResults(ResultReader resultReader, SessionConfig conf, Stats stats,
-            ARecordType recordType) throws HyracksDataException {
-        new ResultPrinter(conf, stats, recordType).print(resultReader);
+    public static void printResults(ICcApplicationContext appCtx, ResultReader resultReader, SessionConfig conf,
+            Stats stats, ARecordType recordType) throws HyracksDataException {
+        new ResultPrinter(appCtx, conf, stats, recordType).print(resultReader);
     }
 
-    public static void printResults(String record, SessionConfig conf, Stats stats, ARecordType recordType)
-            throws HyracksDataException {
-        new ResultPrinter(conf, stats, recordType).print(record);
+    public static void printResults(ICcApplicationContext appCtx, String record, SessionConfig conf, Stats stats,
+            ARecordType recordType) throws HyracksDataException {
+        new ResultPrinter(appCtx, conf, stats, recordType).print(record);
     }
 
     public static void printResultHandle(SessionConfig conf, ResultHandle handle) throws HyracksDataException {
@@ -123,9 +124,8 @@
         pw.print("\": [{ \n");
         printField(pw, QueryServiceServlet.ErrorField.CODE.str(), "1");
         final String msg = rootCause.getMessage();
-        printField(pw, QueryServiceServlet.ErrorField.MSG.str(), JSONUtil
-                        .escape(msg != null ? msg : rootCause.getClass().getSimpleName()),
-                addStack);
+        printField(pw, QueryServiceServlet.ErrorField.MSG.str(),
+                JSONUtil.escape(msg != null ? msg : rootCause.getClass().getSimpleName()), addStack);
         pw.print(comma ? "\t}],\n" : "\t}]\n");
     }
 
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/UpdateApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/UpdateApiServlet.java
index ad2c128..3650189 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/UpdateApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/UpdateApiServlet.java
@@ -21,6 +21,7 @@
 import java.util.concurrent.ConcurrentMap;
 
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.translator.IStatementExecutorFactory;
@@ -29,10 +30,10 @@
 public class UpdateApiServlet extends RestApiServlet {
     private static final byte ALLOWED_CATEGORIES = Statement.Category.QUERY | Statement.Category.UPDATE;
 
-    public UpdateApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+    public UpdateApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
             ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
             IStorageComponentProvider componentProvider) {
-        super(ctx, paths, compilationProvider, statementExecutorFactory, componentProvider);
+        super(ctx, paths, appCtx, compilationProvider, statementExecutorFactory, componentProvider);
     }
 
     @Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
index a4cea39..5acba381 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
@@ -27,15 +27,17 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import io.netty.handler.codec.http.HttpResponseStatus;
-import org.apache.asterix.common.config.IPropertiesProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.hyracks.http.api.IServletRequest;
 import org.apache.hyracks.http.api.IServletResponse;
 import org.apache.hyracks.http.server.AbstractServlet;
 import org.apache.hyracks.http.server.utils.HttpUtil;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+import io.netty.handler.codec.http.HttpResponseStatus;
+
 public class VersionApiServlet extends AbstractServlet {
     private static final Logger LOGGER = Logger.getLogger(VersionApiServlet.class.getName());
 
@@ -46,7 +48,7 @@
     @Override
     protected void get(IServletRequest request, IServletResponse response) {
         response.setStatus(HttpResponseStatus.OK);
-        IPropertiesProvider props = (IPropertiesProvider) ctx.get(ASTERIX_APP_CONTEXT_INFO_ATTR);
+        ICcApplicationContext props = (ICcApplicationContext) ctx.get(ASTERIX_APP_CONTEXT_INFO_ATTR);
         Map<String, String> buildProperties = props.getBuildProperties().getAllProps();
         ObjectMapper om = new ObjectMapper();
         ObjectNode responseObject = om.createObjectNode();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java
index d03e574..ecf2c53 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java
@@ -25,6 +25,7 @@
 import org.apache.asterix.api.common.APIFramework;
 import org.apache.asterix.app.translator.QueryTranslator;
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.utils.Job;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.lang.common.base.IParser;
@@ -51,10 +52,12 @@
     private final APIFramework apiFramework;
     private final IStatementExecutorFactory statementExecutorFactory;
     private final IStorageComponentProvider storageComponentProvider;
+    private ICcApplicationContext appCtx;
 
-    public AsterixJavaClient(IHyracksClientConnection hcc, Reader queryText, PrintWriter writer,
-            ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
-            IStorageComponentProvider storageComponentProvider) {
+    public AsterixJavaClient(ICcApplicationContext appCtx, IHyracksClientConnection hcc, Reader queryText,
+            PrintWriter writer, ILangCompilationProvider compilationProvider,
+            IStatementExecutorFactory statementExecutorFactory, IStorageComponentProvider storageComponentProvider) {
+        this.appCtx = appCtx;
         this.hcc = hcc;
         this.queryText = queryText;
         this.writer = writer;
@@ -65,10 +68,10 @@
         parserFactory = compilationProvider.getParserFactory();
     }
 
-    public AsterixJavaClient(IHyracksClientConnection hcc, Reader queryText,
+    public AsterixJavaClient(ICcApplicationContext appCtx, IHyracksClientConnection hcc, Reader queryText,
             ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
             IStorageComponentProvider storageComponentProvider) {
-        this(hcc, queryText,
+        this(appCtx, hcc, queryText,
                 // This is a commandline client and so System.out is appropriate
                 new PrintWriter(System.out, true), // NOSONAR
                 compilationProvider, statementExecutorFactory, storageComponentProvider);
@@ -102,8 +105,8 @@
             conf.set(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS, true);
         }
 
-        IStatementExecutor translator =
-                statementExecutorFactory.create(statements, conf, compilationProvider, storageComponentProvider);
+        IStatementExecutor translator = statementExecutorFactory.create(appCtx, statements, conf, compilationProvider,
+                storageComponentProvider);
         translator.compileAndExecute(hcc, null, QueryTranslator.ResultDelivery.IMMEDIATE,
                 new IStatementExecutor.Stats());
         writer.flush();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/cc/ResourceIdManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/cc/ResourceIdManager.java
index f43092b..372404c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/cc/ResourceIdManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/cc/ResourceIdManager.java
@@ -36,7 +36,7 @@
         if (!allReported) {
             synchronized (this) {
                 if (!allReported) {
-                    if (reportedNodes.size() < ClusterStateManager.getNumberOfNodes()) {
+                    if (reportedNodes.size() < ClusterStateManager.INSTANCE.getNumberOfNodes()) {
                         return -1;
                     } else {
                         reportedNodes = null;
@@ -58,7 +58,7 @@
         if (!allReported) {
             globalResourceId.set(Math.max(maxResourceId, globalResourceId.get()));
             reportedNodes.add(nodeId);
-            if (reportedNodes.size() == ClusterStateManager.getNumberOfNodes()) {
+            if (reportedNodes.size() == ClusterStateManager.INSTANCE.getNumberOfNodes()) {
                 reportedNodes = null;
                 allReported = true;
             }
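(Aside: both ResourceIdManager hunks sit inside the same concurrency gate: the hot path reads an allReported flag without locking, and the slow path re-checks the set of reporting nodes under synchronized before flipping the flag. A stripped-down sketch of that gate; field visibility is simplified, and the flag is made volatile here to keep the lock-free read safe:

    import java.util.HashSet;
    import java.util.Set;
    import java.util.concurrent.atomic.AtomicLong;

    public class ReportGate {
        private final AtomicLong globalResourceId = new AtomicLong();
        private volatile boolean allReported; // read lock-free on the hot path
        private Set<String> reportedNodes = new HashSet<>();
        private final int expectedNodes;

        ReportGate(int expectedNodes) {
            this.expectedNodes = expectedNodes;
        }

        long createResourceId() {
            if (!allReported) {
                synchronized (this) {
                    if (!allReported) { // re-check under the lock
                        return -1; // not every node has reported yet
                    }
                }
            }
            return globalResourceId.incrementAndGet();
        }

        synchronized void report(String nodeId, long maxResourceId) {
            globalResourceId.set(Math.max(maxResourceId, globalResourceId.get()));
            reportedNodes.add(nodeId);
            if (reportedNodes.size() == expectedNodes) {
                reportedNodes = null; // no longer needed once everyone reported
                allReported = true;
            }
        }
    }

End of aside.)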
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
index b4e1c75..5f86c28 100755
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
@@ -50,11 +50,11 @@
 import org.apache.asterix.metadata.entities.Library;
 import org.apache.asterix.metadata.utils.MetadataUtil;
 import org.apache.asterix.runtime.formats.NonTaggedDataFormat;
-import org.apache.hyracks.control.common.controllers.ControllerConfig;
 
 public class ExternalLibraryUtils {
 
     private static final Logger LOGGER = Logger.getLogger(ExternalLibraryUtils.class.getName());
+    private static final FilenameFilter nonHiddenFileNameFilter = (dir, name) -> !name.startsWith(".");
 
     private ExternalLibraryUtils() {
     }
@@ -72,19 +72,15 @@
         // directory exists?
         if (installLibDir.exists()) {
             // get the list of files in the directory
-            for (String dataverse : installLibDir.list()) {
-                File dataverseDir = new File(installLibDir, dataverse);
-                String[] libraries = dataverseDir.list();
-                for (String library : libraries) {
+            for (File dataverseDir : installLibDir.listFiles(File::isDirectory)) {
+                for (File libraryDir : dataverseDir.listFiles(File::isDirectory)) {
                     // for each file (library), register library
-                    registerLibrary(externalLibraryManager, dataverse, library);
+                    registerLibrary(externalLibraryManager, dataverseDir.getName(), libraryDir.getName());
                     // is metadata node?
                     if (isMetadataNode) {
                         // get library file
-                        File libraryDir = new File(installLibDir.getAbsolutePath() + File.separator + dataverse
-                                + File.separator + library);
                         // install if needed (i.e., add the functions, adapters, datasources, parsers to the metadata)
-                        installLibraryIfNeeded(dataverse, libraryDir, uninstalledLibs);
+                        installLibraryIfNeeded(dataverseDir.getName(), libraryDir, uninstalledLibs);
                     }
                 }
             }
@@ -105,7 +101,7 @@
         // directory exists?
         if (uninstallLibDir.exists()) {
             // list files
-            uninstallLibNames = uninstallLibDir.list();
+            uninstallLibNames = uninstallLibDir.list(nonHiddenFileNameFilter);
             for (String uninstallLibName : uninstallLibNames) {
                 // Get the <dataverse name - library name> pair
                 String[] components = uninstallLibName.split("\\.");
@@ -295,7 +291,6 @@
      *
      * @param dataverse
      * @param libraryName
-     * @param installLibDir
      * @throws Exception
      */
     protected static void registerLibrary(ILibraryManager externalLibraryManager, String dataverse, String libraryName)
@@ -395,16 +390,26 @@
      * @return the directory "$(ControllerConfig.defaultDir)/library": This needs to be improved
      */
     protected static File getLibraryInstallDir() {
-        String workingDir = System.getProperty("user.dir");
-        return new File(workingDir, "library");
+        // Check the managix directory first; if it does not exist, fall back to the app home.
+        File installDir = new File(System.getProperty("user.dir"), "library");
+        if (!installDir.exists()) {
+            installDir = new File(System.getProperty("app.home", System.getProperty("user.home"))
+                    + File.separator + "lib" + File.separator + "udfs");
+        }
+        return installDir;
     }
 
     /**
      * @return the directory "$(ControllerConfig.defaultDir)/uninstall": This needs to be improved
      */
     protected static File getLibraryUninstallDir() {
-        String workingDir = System.getProperty("user.dir");
-        return new File(workingDir, "uninstall");
+        // Check the managix directory first; if it does not exist, fall back to the app home.
+        File uninstallDir = new File(System.getProperty("user.dir"), "uninstall");
+        if (!uninstallDir.exists()) {
+            uninstallDir = new File(System.getProperty("app.home", System.getProperty("user.home"))
+                    + File.separator + "lib" + File.separator + "udfs" + File.separator + "uninstall");
+        }
+        return uninstallDir;
     }
 
 }
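(Aside: the two new lookups share one fallback rule: prefer the managix-style directory under the working directory, and only if it is absent fall back to a path under the app.home system property, or user.home when app.home is unset. A standalone sketch of the install-directory half of that rule:

    import java.io.File;

    public class LibraryDirResolver {
        static File resolveInstallDir() {
            // 1) managix-style layout: ./library under the working directory
            File installDir = new File(System.getProperty("user.dir"), "library");
            if (!installDir.exists()) {
                // 2) fall back to $app.home/lib/udfs ($user.home if app.home is unset)
                String home = System.getProperty("app.home", System.getProperty("user.home"));
                installDir = new File(home, "lib" + File.separator + "udfs");
            }
            return installDir;
        }

        public static void main(String[] args) {
            System.out.println("install dir: " + resolveInstallDir());
        }
    }

End of aside.)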
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
index a0b4a2a..555f571 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
@@ -29,8 +29,8 @@
 
 import org.apache.asterix.active.ActiveManager;
 import org.apache.asterix.api.common.AppRuntimeContextProviderForRecovery;
-import org.apache.asterix.common.api.IAppRuntimeContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.ThreadExecutor;
 import org.apache.asterix.common.cluster.ClusterPartition;
 import org.apache.asterix.common.config.ActiveProperties;
@@ -99,7 +99,7 @@
 import org.apache.hyracks.storage.common.file.ILocalResourceRepositoryFactory;
 import org.apache.hyracks.storage.common.file.IResourceIdFactory;
 
-public class NCAppRuntimeContext implements IAppRuntimeContext {
+public class NCAppRuntimeContext implements INcApplicationContext {
     private static final Logger LOGGER = Logger.getLogger(NCAppRuntimeContext.class.getName());
 
     private ILSMMergePolicyFactory metadataMergePolicyFactory;
@@ -165,28 +165,28 @@
 
     @Override
     public void initialize(boolean initialRun) throws IOException, ACIDException {
-        ioManager = ncServiceContext.getIoManager();
-        threadExecutor = new ThreadExecutor(ncServiceContext.getThreadFactory());
+        ioManager = getServiceContext().getIoManager();
+        threadExecutor = new ThreadExecutor(getServiceContext().getThreadFactory());
         fileMapManager = new FileMapManager(ioManager);
         ICacheMemoryAllocator allocator = new HeapBufferAllocator();
         IPageCleanerPolicy pcp = new DelayPageCleanerPolicy(600000);
         IPageReplacementStrategy prs = new ClockPageReplacementStrategy(allocator,
                 storageProperties.getBufferCachePageSize(), storageProperties.getBufferCacheNumPages());
 
-        AsynchronousScheduler.INSTANCE.init(ncServiceContext.getThreadFactory());
+        AsynchronousScheduler.INSTANCE.init(getServiceContext().getThreadFactory());
         lsmIOScheduler = AsynchronousScheduler.INSTANCE;
 
         metadataMergePolicyFactory = new PrefixMergePolicyFactory();
 
         ILocalResourceRepositoryFactory persistentLocalResourceRepositoryFactory =
-                new PersistentLocalResourceRepositoryFactory(ioManager, ncServiceContext.getNodeId(),
+                new PersistentLocalResourceRepositoryFactory(ioManager, getServiceContext().getNodeId(),
                         metadataProperties);
 
         localResourceRepository =
                 (PersistentLocalResourceRepository) persistentLocalResourceRepositoryFactory.createRepository();
 
         IAppRuntimeContextProvider asterixAppRuntimeContextProvider = new AppRuntimeContextProviderForRecovery(this);
-        txnSubsystem = new TransactionSubsystem(ncServiceContext, ncServiceContext.getNodeId(),
+        txnSubsystem = new TransactionSubsystem(getServiceContext(), getServiceContext().getNodeId(),
                 asterixAppRuntimeContextProvider, txnProperties);
 
         IRecoveryManager recoveryMgr = txnSubsystem.getRecoveryManager();
@@ -202,11 +202,11 @@
 
         isShuttingdown = false;
 
-        activeManager = new ActiveManager(threadExecutor, ncServiceContext.getNodeId(),
+        activeManager = new ActiveManager(threadExecutor, getServiceContext().getNodeId(),
                 activeProperties.getMemoryComponentGlobalBudget(), compilerProperties.getFrameSize());
 
-        if (replicationProperties.isParticipant(ncServiceContext.getNodeId())) {
-            String nodeId = ncServiceContext.getNodeId();
+        if (replicationProperties.isParticipant(getServiceContext().getNodeId())) {
+            String nodeId = getServiceContext().getNodeId();
 
             replicaResourcesManager = new ReplicaResourcesManager(localResourceRepository, metadataProperties);
 
@@ -235,24 +235,24 @@
 
             //initialize replication channel
             replicationChannel = new ReplicationChannel(nodeId, replicationProperties, txnSubsystem.getLogManager(),
-                    replicaResourcesManager, replicationManager, ncServiceContext,
+                    replicaResourcesManager, replicationManager, getServiceContext(),
                     asterixAppRuntimeContextProvider);
 
             remoteRecoveryManager = new RemoteRecoveryManager(replicationManager, this, replicationProperties);
 
             bufferCache = new BufferCache(ioManager, prs, pcp, fileMapManager,
-                    storageProperties.getBufferCacheMaxOpenFiles(), ncServiceContext.getThreadFactory(),
+                    storageProperties.getBufferCacheMaxOpenFiles(), getServiceContext().getThreadFactory(),
                     replicationManager);
         } else {
             bufferCache = new BufferCache(ioManager, prs, pcp, fileMapManager,
-                    storageProperties.getBufferCacheMaxOpenFiles(), ncServiceContext.getThreadFactory());
+                    storageProperties.getBufferCacheMaxOpenFiles(), getServiceContext().getThreadFactory());
         }
 
         /*
          * The order of registration is important. The buffer cache must be registered before the recovery and
          * transaction managers. Note: registered components are stopped in reverse order.
          */
-        ILifeCycleComponentManager lccm = ncServiceContext.getLifeCycleComponentManager();
+        ILifeCycleComponentManager lccm = getServiceContext().getLifeCycleComponentManager();
         lccm.register((ILifeCycleComponent) bufferCache);
         /*
          * LogManager must be stopped after RecoveryManager, DatasetLifeCycleManager, and ReplicationManager
@@ -444,7 +444,7 @@
         MetadataNode.INSTANCE.initialize(this, ncExtensionManager.getMetadataTupleTranslatorProvider(),
                 ncExtensionManager.getMetadataExtensions());
 
-        proxy = (IAsterixStateProxy) ncServiceContext.getDistributedState();
+        proxy = (IAsterixStateProxy) getServiceContext().getDistributedState();
         if (proxy == null) {
             throw new IllegalStateException("Metadata node cannot access distributed state");
         }
@@ -453,9 +453,9 @@
         // This way we can delay the registration of the metadataNode until
         // it is completely initialized.
         MetadataManager.initialize(proxy, MetadataNode.INSTANCE);
-        MetadataBootstrap.startUniverse(ncServiceContext, newUniverse);
+        MetadataBootstrap.startUniverse(getServiceContext(), newUniverse);
         MetadataBootstrap.startDDLRecovery();
-        ncExtensionManager.initializeMetadata(ncServiceContext);
+        ncExtensionManager.initializeMetadata(getServiceContext());
 
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Metadata node bound");
@@ -466,7 +466,7 @@
     public void exportMetadataNodeStub() throws RemoteException {
         IMetadataNode stub = (IMetadataNode) UnicastRemoteObject.exportObject(MetadataNode.INSTANCE,
                 getMetadataProperties().getMetadataPort());
-        ((IAsterixStateProxy) ncServiceContext.getDistributedState()).setMetadataNode(stub);
+        ((IAsterixStateProxy) getServiceContext().getDistributedState()).setMetadataNode(stub);
     }
 
     @Override
@@ -482,4 +482,9 @@
     public IStorageComponentProvider getStorageComponentProvider() {
         return componentProvider;
     }
+
+    @Override
+    public INCServiceContext getServiceContext() {
+        return ncServiceContext;
+    }
 }
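(Aside: the block comment in initialize() carries the real constraint here: components registered with the lifecycle manager are stopped in reverse registration order, so the buffer cache, registered first, is stopped last. A toy manager demonstrating that LIFO shutdown, with abbreviated interfaces:

    import java.util.ArrayDeque;
    import java.util.Deque;

    public class LifecycleManagerSketch {
        interface Component {
            String name();
        }

        private final Deque<Component> registered = new ArrayDeque<>();

        void register(Component c) {
            registered.push(c); // stack: the last registered sits on top
        }

        void stopAll() {
            // pop order == reverse of registration order
            while (!registered.isEmpty()) {
                System.out.println("stopping " + registered.pop().name());
            }
        }

        public static void main(String[] args) {
            LifecycleManagerSketch m = new LifecycleManagerSketch();
            m.register(() -> "bufferCache"); // registered first, stopped last
            m.register(() -> "recoveryManager");
            m.register(() -> "logManager");
            m.stopAll(); // prints: logManager, recoveryManager, bufferCache
        }
    }

End of aside.)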
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/BindMetadataNodeTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/BindMetadataNodeTask.java
index 91f3524..424e66c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/BindMetadataNodeTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/BindMetadataNodeTask.java
@@ -18,7 +18,7 @@
  */
 package org.apache.asterix.app.nc.task;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.INCLifecycleTask;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.service.IControllerService;
@@ -34,7 +34,7 @@
 
     @Override
     public void perform(IControllerService cs) throws HyracksDataException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+        INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
         try {
             if (exportStub) {
                 appContext.exportMetadataNodeStub();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/CheckpointTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/CheckpointTask.java
index e77346a..b7701d2 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/CheckpointTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/CheckpointTask.java
@@ -18,7 +18,7 @@
  */
 package org.apache.asterix.app.nc.task;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.INCLifecycleTask;
 import org.apache.asterix.common.transactions.ICheckpointManager;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -30,7 +30,7 @@
 
     @Override
     public void perform(IControllerService cs) throws HyracksDataException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+        INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
         ICheckpointManager checkpointMgr = appContext.getTransactionSubsystem().getCheckpointManager();
         checkpointMgr.doSharpCheckpoint();
     }
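(Aside: this task and the ones that follow all share one shape: perform downcasts the controller service's application context to the NC-side interface, now INcApplicationContext, and drives a single startup step. A generic sketch of the pattern, with abbreviated stand-ins for the real asterix-common and hyracks-api interfaces:

    public class LifecycleTaskSketch {
        // abbreviated stand-ins for IControllerService / INcApplicationContext
        interface ControllerService {
            Object getApplicationContext();
        }

        interface NcApplicationContext {
            void doCheckpoint();
        }

        // the task pattern: downcast, then perform one lifecycle step
        static class CheckpointTask {
            void perform(ControllerService cs) {
                NcApplicationContext appContext = (NcApplicationContext) cs.getApplicationContext();
                appContext.doCheckpoint();
            }
        }

        public static void main(String[] args) {
            NcApplicationContext ctx = () -> System.out.println("sharp checkpoint taken");
            new CheckpointTask().perform(() -> ctx);
        }
    }

End of aside.)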
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/ExternalLibrarySetupTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/ExternalLibrarySetupTask.java
index ad9b28a..9506690 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/ExternalLibrarySetupTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/ExternalLibrarySetupTask.java
@@ -19,7 +19,7 @@
 package org.apache.asterix.app.nc.task;
 
 import org.apache.asterix.app.external.ExternalLibraryUtils;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.INCLifecycleTask;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.service.IControllerService;
@@ -35,7 +35,7 @@
 
     @Override
     public void perform(IControllerService cs) throws HyracksDataException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+        INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
         try {
             ExternalLibraryUtils.setUpExternaLibraries(appContext.getLibraryManager(), metadataNode);
         } catch (Exception e) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/LocalRecoveryTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/LocalRecoveryTask.java
index 777097d..d52d15e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/LocalRecoveryTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/LocalRecoveryTask.java
@@ -21,7 +21,7 @@
 import java.io.IOException;
 import java.util.Set;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.INCLifecycleTask;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -38,7 +38,7 @@
 
     @Override
     public void perform(IControllerService cs) throws HyracksDataException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+        INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
         try {
             appContext.getTransactionSubsystem().getRecoveryManager().startLocalRecovery(partitions);
         } catch (IOException | ACIDException e) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/MetadataBootstrapTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/MetadataBootstrapTask.java
index 65004b8..6415416 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/MetadataBootstrapTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/MetadataBootstrapTask.java
@@ -18,7 +18,7 @@
  */
 package org.apache.asterix.app.nc.task;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.INCLifecycleTask;
 import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -30,7 +30,7 @@
 
     @Override
     public void perform(IControllerService cs) throws HyracksDataException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+        INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
         try {
             SystemState state = appContext.getTransactionSubsystem().getRecoveryManager().getSystemState();
             appContext.initializeMetadata(state == SystemState.PERMANENT_DATA_LOSS);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/RemoteRecoveryTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/RemoteRecoveryTask.java
index 48479c5..f74a986 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/RemoteRecoveryTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/RemoteRecoveryTask.java
@@ -22,7 +22,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.INCLifecycleTask;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.service.IControllerService;
@@ -38,7 +38,7 @@
 
     @Override
     public void perform(IControllerService cs) throws HyracksDataException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+        INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
         appContext.getRemoteRecoveryManager().doRemoteRecoveryPlan(recoveryPlan);
     }
 
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartFailbackTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartFailbackTask.java
index 9f04b10..8696d23 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartFailbackTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartFailbackTask.java
@@ -18,7 +18,7 @@
  */
 package org.apache.asterix.app.nc.task;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.INCLifecycleTask;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.service.IControllerService;
@@ -29,7 +29,7 @@
 
     @Override
     public void perform(IControllerService cs) throws HyracksDataException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+        INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
         appContext.getRemoteRecoveryManager().startFailbackProcess();
     }
 }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartLifecycleComponentsTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartLifecycleComponentsTask.java
index d1754dd..799581b 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartLifecycleComponentsTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartLifecycleComponentsTask.java
@@ -23,7 +23,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.INCLifecycleTask;
 import org.apache.asterix.common.config.MetadataProperties;
 import org.apache.asterix.hyracks.bootstrap.AsterixStateDumpHandler;
@@ -40,7 +40,7 @@
 
     @Override
     public void perform(IControllerService cs) throws HyracksDataException {
-        IAppRuntimeContext applicationContext = (IAppRuntimeContext) cs.getApplicationContext();
+        INcApplicationContext applicationContext = (INcApplicationContext) cs.getApplicationContext();
         NCServiceContext serviceCtx = (NCServiceContext) cs.getContext();
         MetadataProperties metadataProperties = applicationContext.getMetadataProperties();
         if (LOGGER.isLoggable(Level.INFO)) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartReplicationServiceTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartReplicationServiceTask.java
index 93e5b50..60d5c29 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartReplicationServiceTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartReplicationServiceTask.java
@@ -18,7 +18,7 @@
  */
 package org.apache.asterix.app.nc.task;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.INCLifecycleTask;
 import org.apache.asterix.common.replication.IReplicationManager;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -30,7 +30,7 @@
 
     @Override
     public void perform(IControllerService cs) throws HyracksDataException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+        INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
         try {
             //Open replication channel
             appContext.getReplicationChannel().start();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
index 45f96ac..db26c3a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
@@ -55,6 +55,7 @@
 import org.apache.asterix.common.cluster.ClusterPartition;
 import org.apache.asterix.common.cluster.IClusterStateManager;
 import org.apache.asterix.common.config.ReplicationProperties;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.common.messaging.api.ICCMessageBroker;
@@ -63,8 +64,8 @@
 import org.apache.asterix.common.replication.IReplicationStrategy;
 import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
 import org.apache.asterix.metadata.MetadataManager;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.util.FaultToleranceUtil;
+import org.apache.hyracks.api.application.ICCServiceContext;
 import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
 import org.apache.hyracks.api.config.IOption;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -83,6 +84,7 @@
     private IClusterStateManager clusterManager;
     private ICCMessageBroker messageBroker;
     private IReplicationStrategy replicationStrategy;
+    private ICCServiceContext serviceCtx;
     private Set<String> pendingStartupCompletionNodes = new HashSet<>();
 
     @Override
@@ -135,8 +137,8 @@
     private synchronized void requestPartitionsTakeover(String failedNodeId) {
         //replica -> list of partitions to takeover
         Map<String, List<Integer>> partitionRecoveryPlan = new HashMap<>();
-        ReplicationProperties replicationProperties = AppContextInfo.INSTANCE.getReplicationProperties();
-
+        ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
+        ReplicationProperties replicationProperties = appCtx.getReplicationProperties();
         //collect the partitions of the failed NC
         List<ClusterPartition> lostPartitions = getNodeAssignedPartitions(failedNodeId);
         if (!lostPartitions.isEmpty()) {
@@ -204,7 +206,8 @@
         planId2FailbackPlanMap.put(plan.getPlanId(), plan);
 
         //get all partitions this node requires to resync
-        ReplicationProperties replicationProperties = AppContextInfo.INSTANCE.getReplicationProperties();
+        ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
+        ReplicationProperties replicationProperties = appCtx.getReplicationProperties();
         Set<String> nodeReplicas = replicationProperties.getNodeReplicasIds(failingBackNodeId);
         clusterManager.getClusterPartitons();
         for (String replicaId : nodeReplicas) {
@@ -255,7 +258,8 @@
                      * if the returning node is the original metadata node,
                      * then metadata node will change after the failback completes
                      */
-                    String originalMetadataNode = AppContextInfo.INSTANCE.getMetadataProperties().getMetadataNodeName();
+                    ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
+                    String originalMetadataNode = appCtx.getMetadataProperties().getMetadataNodeName();
                     if (originalMetadataNode.equals(failbackNode)) {
                         plan.setNodeToReleaseMetadataManager(currentMetadataNode);
                         currentMetadataNode = "";
@@ -399,7 +403,8 @@
 
     private synchronized void requestMetadataNodeTakeover() {
         //need a new node to takeover metadata node
-        ClusterPartition metadataPartiton = AppContextInfo.INSTANCE.getMetadataProperties().getMetadataPartition();
+        ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
+        ClusterPartition metadataPartiton = appCtx.getMetadataProperties().getMetadataPartition();
         //request the metadataPartition node to register itself as the metadata node
         TakeoverMetadataNodeRequestMessage takeoverRequest = new TakeoverMetadataNodeRequestMessage();
         try {
@@ -418,10 +423,11 @@
     }
 
     @Override
-    public IFaultToleranceStrategy from(IReplicationStrategy replicationStrategy, ICCMessageBroker messageBroker) {
+    public IFaultToleranceStrategy from(ICCServiceContext serviceCtx, IReplicationStrategy replicationStrategy) {
         AutoFaultToleranceStrategy ft = new AutoFaultToleranceStrategy();
-        ft.messageBroker = messageBroker;
+        ft.messageBroker = (ICCMessageBroker) serviceCtx.getMessageBroker();
         ft.replicationStrategy = replicationStrategy;
+        ft.serviceCtx = serviceCtx;
         return ft;
     }
 
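Two replacements run through AutoFaultToleranceStrategy: from(...) now takes the ICCServiceContext and derives the ICCMessageBroker from it, and every former AppContextInfo.INSTANCE lookup becomes a cast of serviceCtx.getApplicationContext() to ICcApplicationContext. A sketch of the resulting wiring, using only calls visible in this diff (the wrapper class and method names are illustrative):

import org.apache.asterix.app.replication.AutoFaultToleranceStrategy;
import org.apache.asterix.common.config.ReplicationProperties;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.replication.IFaultToleranceStrategy;
import org.apache.asterix.common.replication.IReplicationStrategy;
import org.apache.hyracks.api.application.ICCServiceContext;

public final class FaultToleranceWiringSketch {

    private FaultToleranceWiringSketch() {
    }

    public static IFaultToleranceStrategy wire(ICCServiceContext serviceCtx,
            IReplicationStrategy replicationStrategy) {
        // The strategy pulls its message broker out of the service context
        // itself, so callers no longer pass an ICCMessageBroker around.
        IFaultToleranceStrategy strategy = new AutoFaultToleranceStrategy().from(serviceCtx, replicationStrategy);
        // Configuration that used to come from AppContextInfo.INSTANCE is now
        // reached through the CC application context.
        ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
        ReplicationProperties replicationProperties = appCtx.getReplicationProperties();
        // replicationProperties would feed recovery planning, as in
        // requestPartitionsTakeover above.
        return strategy;
    }
}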
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/FaultToleranceStrategyFactory.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/FaultToleranceStrategyFactory.java
index 8d382a1..4e8ecd9 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/FaultToleranceStrategyFactory.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/FaultToleranceStrategyFactory.java
@@ -21,15 +21,15 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.asterix.common.messaging.api.ICCMessageBroker;
 import org.apache.asterix.common.replication.IFaultToleranceStrategy;
 import org.apache.asterix.common.replication.IReplicationStrategy;
 import org.apache.asterix.event.schema.cluster.Cluster;
+import org.apache.hyracks.api.application.ICCServiceContext;
 
 public class FaultToleranceStrategyFactory {
 
-    private static final Map<String, Class<? extends IFaultToleranceStrategy>>
-    BUILT_IN_FAULT_TOLERANCE_STRATEGY = new HashMap<>();
+    private static final Map<String, Class<? extends IFaultToleranceStrategy>> BUILT_IN_FAULT_TOLERANCE_STRATEGY =
+            new HashMap<>();
 
     static {
         BUILT_IN_FAULT_TOLERANCE_STRATEGY.put("no_fault_tolerance", NoFaultToleranceStrategy.class);
@@ -42,14 +42,14 @@
     }
 
     public static IFaultToleranceStrategy create(Cluster cluster, IReplicationStrategy repStrategy,
-            ICCMessageBroker messageBroker) {
-        boolean highAvailabilityEnabled = cluster.getHighAvailability() != null
-                && cluster.getHighAvailability().getEnabled() != null
-                && Boolean.valueOf(cluster.getHighAvailability().getEnabled());
+            ICCServiceContext serviceCtx) {
+        boolean highAvailabilityEnabled =
+                cluster.getHighAvailability() != null && cluster.getHighAvailability().getEnabled() != null
+                        && Boolean.valueOf(cluster.getHighAvailability().getEnabled());
 
         if (!highAvailabilityEnabled || cluster.getHighAvailability().getFaultTolerance() == null
                 || cluster.getHighAvailability().getFaultTolerance().getStrategy() == null) {
-            return new NoFaultToleranceStrategy().from(repStrategy, messageBroker);
+            return new NoFaultToleranceStrategy().from(serviceCtx, repStrategy);
         }
         String strategyName = cluster.getHighAvailability().getFaultTolerance().getStrategy().toLowerCase();
         if (!BUILT_IN_FAULT_TOLERANCE_STRATEGY.containsKey(strategyName)) {
@@ -58,7 +58,7 @@
         }
         Class<? extends IFaultToleranceStrategy> clazz = BUILT_IN_FAULT_TOLERANCE_STRATEGY.get(strategyName);
         try {
-            return clazz.newInstance().from(repStrategy, messageBroker);
+            return clazz.newInstance().from(serviceCtx, repStrategy);
         } catch (InstantiationException | IllegalAccessException e) {
             throw new IllegalStateException(e);
         }
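The factory keeps its reflective construction (a no-arg constructor followed by from(...)), which is why from(...) acts as the initializer rather than a constructor; the change merely swaps the second collaborator the strategies are initialized with. A hedged usage sketch (the surrounding class is illustrative, not part of this change):

import org.apache.asterix.app.replication.FaultToleranceStrategyFactory;
import org.apache.asterix.common.replication.IFaultToleranceStrategy;
import org.apache.asterix.common.replication.IReplicationStrategy;
import org.apache.asterix.event.schema.cluster.Cluster;
import org.apache.hyracks.api.application.ICCServiceContext;

final class StrategyBootstrapSketch {

    private StrategyBootstrapSketch() {
    }

    static IFaultToleranceStrategy build(Cluster cluster, IReplicationStrategy repStrategy,
            ICCServiceContext serviceCtx) {
        // Resolves the configured strategy name from the cluster description
        // (falling back to NoFaultToleranceStrategy, as in create() above)
        // and initializes it with the service context.
        return FaultToleranceStrategyFactory.create(cluster, repStrategy, serviceCtx);
    }
}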
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/MetadataNodeFaultToleranceStrategy.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/MetadataNodeFaultToleranceStrategy.java
index c40e236..1b57403 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/MetadataNodeFaultToleranceStrategy.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/MetadataNodeFaultToleranceStrategy.java
@@ -46,6 +46,7 @@
 import org.apache.asterix.common.api.INCLifecycleTask;
 import org.apache.asterix.common.cluster.ClusterPartition;
 import org.apache.asterix.common.cluster.IClusterStateManager;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.common.messaging.api.ICCMessageBroker;
@@ -54,8 +55,8 @@
 import org.apache.asterix.common.replication.IReplicationStrategy;
 import org.apache.asterix.common.replication.Replica;
 import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.util.FaultToleranceUtil;
+import org.apache.hyracks.api.application.ICCServiceContext;
 import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -66,6 +67,7 @@
     private String metadataNodeId;
     private IReplicationStrategy replicationStrategy;
     private ICCMessageBroker messageBroker;
+    private ICCServiceContext serviceCtx;
     private final Set<String> hotStandbyMetadataReplica = new HashSet<>();
     private final Set<String> failedNodes = new HashSet<>();
     private Set<String> pendingStartupCompletionNodes = new HashSet<>();
@@ -91,8 +93,8 @@
         }
         // If the failed node is the metadata node, ask its replicas to replay any committed jobs
         if (nodeId.equals(metadataNodeId)) {
-            int metadataPartitionId = AppContextInfo.INSTANCE.getMetadataProperties().getMetadataPartition()
-                    .getPartitionId();
+            ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
+            int metadataPartitionId = appCtx.getMetadataProperties().getMetadataPartition().getPartitionId();
             Set<Integer> metadataPartition = new HashSet<>(Arrays.asList(metadataPartitionId));
             Set<Replica> activeRemoteReplicas = replicationStrategy.getRemoteReplicas(metadataNodeId).stream()
                     .filter(replica -> !failedNodes.contains(replica.getId())).collect(Collectors.toSet());
@@ -110,10 +112,11 @@
     }
 
     @Override
-    public IFaultToleranceStrategy from(IReplicationStrategy replicationStrategy, ICCMessageBroker messageBroker) {
+    public IFaultToleranceStrategy from(ICCServiceContext serviceCtx, IReplicationStrategy replicationStrategy) {
         MetadataNodeFaultToleranceStrategy ft = new MetadataNodeFaultToleranceStrategy();
         ft.replicationStrategy = replicationStrategy;
-        ft.messageBroker = messageBroker;
+        ft.messageBroker = (ICCMessageBroker) serviceCtx.getMessageBroker();
+        ft.serviceCtx = serviceCtx;
         return ft;
     }
 
@@ -247,8 +250,8 @@
         // Construct recovery plan: Node => Set of partitions to recover from it
         Map<String, Set<Integer>> recoveryPlan = new HashMap<>();
         // Recover metadata partition from any metadata hot standby replica
-        int metadataPartitionId = AppContextInfo.INSTANCE.getMetadataProperties().getMetadataPartition()
-                .getPartitionId();
+        ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
+        int metadataPartitionId = appCtx.getMetadataProperties().getMetadataPartition().getPartitionId();
         Set<Integer> metadataPartition = new HashSet<>(Arrays.asList(metadataPartitionId));
         recoveryPlan.put(hotStandbyMetadataReplica.iterator().next(), metadataPartition);
         return new RemoteRecoveryTask(recoveryPlan);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NoFaultToleranceStrategy.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NoFaultToleranceStrategy.java
index b8b3c49..b9ea135 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NoFaultToleranceStrategy.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NoFaultToleranceStrategy.java
@@ -47,6 +47,7 @@
 import org.apache.asterix.common.replication.INCLifecycleMessage;
 import org.apache.asterix.common.replication.IReplicationStrategy;
 import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
+import org.apache.hyracks.api.application.ICCServiceContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class NoFaultToleranceStrategy implements IFaultToleranceStrategy {
@@ -54,8 +55,8 @@
     private static final Logger LOGGER = Logger.getLogger(NoFaultToleranceStrategy.class.getName());
     IClusterStateManager clusterManager;
     private String metadataNodeId;
-    private ICCMessageBroker messageBroker;
     private Set<String> pendingStartupCompletionNodes = new HashSet<>();
+    private ICCMessageBroker messageBroker;
 
     @Override
     public void notifyNodeJoin(String nodeId) throws HyracksDataException {
@@ -87,9 +88,9 @@
     }
 
     @Override
-    public IFaultToleranceStrategy from(IReplicationStrategy replicationStrategy, ICCMessageBroker messageBroker) {
+    public IFaultToleranceStrategy from(ICCServiceContext serviceCtx, IReplicationStrategy replicationStrategy) {
         NoFaultToleranceStrategy ft = new NoFaultToleranceStrategy();
-        ft.messageBroker = messageBroker;
+        ft.messageBroker = (ICCMessageBroker) serviceCtx.getMessageBroker();
         return ft;
     }
 
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackRequestMessage.java
index 0924838..feca7e8 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackRequestMessage.java
@@ -23,14 +23,14 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.asterix.common.replication.IRemoteRecoveryManager;
 import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 
-public class CompleteFailbackRequestMessage extends AbstractFailbackPlanMessage {
+public class CompleteFailbackRequestMessage extends AbstractFailbackPlanMessage implements INcAddressedMessage {
 
     private static final long serialVersionUID = 1L;
     private static final Logger LOGGER = Logger.getLogger(CompleteFailbackRequestMessage.class.getName());
@@ -62,9 +62,8 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
-        INCMessageBroker broker = (INCMessageBroker) cs.getContext().getMessageBroker();
+    public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
+        INCMessageBroker broker = (INCMessageBroker) appContext.getServiceContext().getMessageBroker();
         HyracksDataException hde = null;
         try {
             IRemoteRecoveryManager remoteRecoeryManager = appContext.getRemoteRecoveryManager();
@@ -73,8 +72,8 @@
             LOGGER.log(Level.SEVERE, "Failure during completion of failback process", e);
             hde = HyracksDataException.create(e);
         } finally {
-            CompleteFailbackResponseMessage reponse = new CompleteFailbackResponseMessage(planId,
-                    requestId, partitions);
+            CompleteFailbackResponseMessage reponse =
+                    new CompleteFailbackResponseMessage(planId, requestId, partitions);
             try {
                 broker.sendMessageToCC(reponse);
             } catch (Exception e) {
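CompleteFailbackRequestMessage illustrates the NC-addressed half of the new message contract: instead of handle(IControllerService) plus casts, INcAddressedMessage hands the handler a typed INcApplicationContext, and the broker for any reply is one hop away via the service context. PreparePartitionsFailbackRequestMessage, ReplayPartitionLogsRequestMessage, TakeoverMetadataNodeRequestMessage, and TakeoverPartitionsRequestMessage below follow suit. A minimal sketch, assuming handle(...) is the interface's only abstract method (ExampleFailbackKickoffMessage is a hypothetical name):

import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.hyracks.api.exceptions.HyracksDataException;

public class ExampleFailbackKickoffMessage implements INcAddressedMessage {

    private static final long serialVersionUID = 1L;

    @Override
    public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
        // The reply channel, formerly cs.getContext().getMessageBroker(), is
        // reached through the typed context's service context.
        INCMessageBroker broker = (INCMessageBroker) appContext.getServiceContext().getMessageBroker();
        // Ignore the request if this NC is already shutting down, the same
        // guard TakeoverPartitionsRequestMessage uses below.
        if (!appContext.isShuttingdown()) {
            appContext.getRemoteRecoveryManager().startFailbackProcess();
        }
    }
}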
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackResponseMessage.java
index fb45892..0c5678f 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackResponseMessage.java
@@ -18,14 +18,15 @@
  */
 package org.apache.asterix.app.replication.message;
 
-import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
-import org.apache.asterix.runtime.utils.AppContextInfo;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-
 import java.util.Set;
 
-public class CompleteFailbackResponseMessage extends AbstractFailbackPlanMessage {
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
+import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class CompleteFailbackResponseMessage extends AbstractFailbackPlanMessage implements ICcAddressedMessage {
 
     private static final long serialVersionUID = 1L;
     private final Set<Integer> partitions;
@@ -49,8 +50,8 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+    public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
     }
 
     @Override
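The CC-addressed half is symmetric: response messages implement ICcAddressedMessage, and the handler receives the ICcApplicationContext, downcasting to the concrete CcApplicationContext that owns the fault-tolerance strategy where AppContextInfo.INSTANCE used to be consulted. The remaining *ResponseMessage classes and the lifecycle/startup messages below repeat this. A sketch under the same single-method assumption (ExampleFailbackAckMessage is hypothetical):

import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
import org.apache.asterix.common.replication.IFaultToleranceStrategy;
import org.apache.asterix.runtime.utils.CcApplicationContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;

public class ExampleFailbackAckMessage implements ICcAddressedMessage {

    private static final long serialVersionUID = 1L;

    @Override
    public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
        // The strategy lives on the concrete CC context, hence the downcast
        // mirrored in every response handler in this change.
        IFaultToleranceStrategy strategy = ((CcApplicationContext) appCtx).getFaultToleranceStrategy();
        // A real response would now hand itself to the strategy, i.e.
        // strategy.process(this), as the classes above do.
    }
}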
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/NCLifecycleTaskReportMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/NCLifecycleTaskReportMessage.java
index 3af075e..03c7ac6 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/NCLifecycleTaskReportMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/NCLifecycleTaskReportMessage.java
@@ -18,12 +18,13 @@
  */
 package org.apache.asterix.app.replication.message;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
 import org.apache.asterix.common.replication.INCLifecycleMessage;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 
-public class NCLifecycleTaskReportMessage implements INCLifecycleMessage {
+public class NCLifecycleTaskReportMessage implements INCLifecycleMessage, ICcAddressedMessage {
 
     private static final long serialVersionUID = 1L;
     private final String nodeId;
@@ -36,8 +37,8 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+    public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
     }
 
     public String getNodeId() {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackRequestMessage.java
index abfd6b2..cefcf49 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackRequestMessage.java
@@ -23,14 +23,15 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
 import org.apache.asterix.transaction.management.resource.PersistentLocalResourceRepository;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 
-public class PreparePartitionsFailbackRequestMessage extends AbstractFailbackPlanMessage {
+public class PreparePartitionsFailbackRequestMessage extends AbstractFailbackPlanMessage
+        implements INcAddressedMessage {
 
     private static final long serialVersionUID = 1L;
     private static final Logger LOGGER = Logger.getLogger(PreparePartitionsFailbackRequestMessage.class.getName());
@@ -71,9 +72,8 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
-        INCMessageBroker broker = (INCMessageBroker) cs.getContext().getMessageBroker();
+    public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
+        INCMessageBroker broker = (INCMessageBroker) appContext.getServiceContext().getMessageBroker();
         /**
          * if the metadata partition will be failed back
          * we need to flush and close all datasets including metadata datasets
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackResponseMessage.java
index e02cd42..bea1039 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackResponseMessage.java
@@ -18,14 +18,16 @@
  */
 package org.apache.asterix.app.replication.message;
 
-import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
-import org.apache.asterix.runtime.utils.AppContextInfo;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-
 import java.util.Set;
 
-public class PreparePartitionsFailbackResponseMessage extends AbstractFailbackPlanMessage {
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
+import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class PreparePartitionsFailbackResponseMessage extends AbstractFailbackPlanMessage
+        implements ICcAddressedMessage {
 
     private static final long serialVersionUID = 1L;
     private final Set<Integer> partitions;
@@ -40,8 +42,8 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+    public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
     }
 
     @Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsRequestMessage.java
index 5a73543..c8e2479 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsRequestMessage.java
@@ -22,14 +22,14 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.asterix.common.replication.INCLifecycleMessage;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 import org.apache.hyracks.control.nc.NodeControllerService;
 
-public class ReplayPartitionLogsRequestMessage implements INCLifecycleMessage {
+public class ReplayPartitionLogsRequestMessage implements INCLifecycleMessage, INcAddressedMessage {
 
     private static final Logger LOGGER = Logger.getLogger(ReplayPartitionLogsRequestMessage.class.getName());
     private static final long serialVersionUID = 1L;
@@ -40,9 +40,8 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        NodeControllerService ncs = (NodeControllerService) cs;
-        IAppRuntimeContext appContext = (IAppRuntimeContext) ncs.getApplicationContext();
+    public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
+        NodeControllerService ncs = (NodeControllerService) appContext.getServiceContext().getControllerService();
         // Replay the logs for these partitions and flush any impacted dataset
         appContext.getRemoteRecoveryManager().replayReplicaPartitionLogs(partitions, true);
 
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsResponseMessage.java
index dc19735..e05fd47 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsResponseMessage.java
@@ -18,14 +18,15 @@
  */
 package org.apache.asterix.app.replication.message;
 
-import org.apache.asterix.common.replication.INCLifecycleMessage;
-import org.apache.asterix.runtime.utils.AppContextInfo;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-
 import java.util.Set;
 
-public class ReplayPartitionLogsResponseMessage implements INCLifecycleMessage {
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
+import org.apache.asterix.common.replication.INCLifecycleMessage;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class ReplayPartitionLogsResponseMessage implements INCLifecycleMessage, ICcAddressedMessage {
 
     private static final long serialVersionUID = 1L;
     private final Set<Integer> partitions;
@@ -37,8 +38,8 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+    public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
     }
 
     public Set<Integer> getPartitions() {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskRequestMessage.java
index 6a313f0..cfe999c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskRequestMessage.java
@@ -21,15 +21,16 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
 import org.apache.asterix.common.messaging.api.INCMessageBroker;
 import org.apache.asterix.common.replication.INCLifecycleMessage;
 import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 import org.apache.hyracks.control.nc.NodeControllerService;
 
-public class StartupTaskRequestMessage implements INCLifecycleMessage {
+public class StartupTaskRequestMessage implements INCLifecycleMessage, ICcAddressedMessage {
 
     private static final Logger LOGGER = Logger.getLogger(StartupTaskRequestMessage.class.getName());
     private static final long serialVersionUID = 1L;
@@ -52,8 +53,8 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+    public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
     }
 
     public SystemState getState() {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskResponseMessage.java
index 92abf5b..aaf3eb8 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskResponseMessage.java
@@ -23,12 +23,14 @@
 import java.util.logging.Logger;
 
 import org.apache.asterix.common.api.INCLifecycleTask;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.asterix.common.replication.INCLifecycleMessage;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.service.IControllerService;
 
-public class StartupTaskResponseMessage implements INCLifecycleMessage {
+public class StartupTaskResponseMessage implements INCLifecycleMessage, INcAddressedMessage {
 
     private static final Logger LOGGER = Logger.getLogger(StartupTaskResponseMessage.class.getName());
     private static final long serialVersionUID = 1L;
@@ -41,8 +43,9 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        INCMessageBroker broker = (INCMessageBroker) cs.getContext().getMessageBroker();
+    public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        INCMessageBroker broker = (INCMessageBroker) appCtx.getServiceContext().getMessageBroker();
+        IControllerService cs = appCtx.getServiceContext().getControllerService();
         boolean success = true;
         HyracksDataException exception = null;
         try {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeRequestMessage.java
index fbc0a4d..2137924 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeRequestMessage.java
@@ -21,21 +21,20 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.asterix.common.replication.INCLifecycleMessage;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 
-public class TakeoverMetadataNodeRequestMessage implements INCLifecycleMessage {
+public class TakeoverMetadataNodeRequestMessage implements INCLifecycleMessage, INcAddressedMessage {
 
     private static final long serialVersionUID = 1L;
     private static final Logger LOGGER = Logger.getLogger(TakeoverMetadataNodeRequestMessage.class.getName());
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
-        INCMessageBroker broker = (INCMessageBroker) cs.getContext().getMessageBroker();
+    public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
+        INCMessageBroker broker = (INCMessageBroker) appContext.getServiceContext().getMessageBroker();
         HyracksDataException hde = null;
         try {
             appContext.initializeMetadata(false);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeResponseMessage.java
index 428047c..ff1b2d2 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeResponseMessage.java
@@ -18,12 +18,13 @@
  */
 package org.apache.asterix.app.replication.message;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
 import org.apache.asterix.common.replication.INCLifecycleMessage;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 
-public class TakeoverMetadataNodeResponseMessage implements INCLifecycleMessage {
+public class TakeoverMetadataNodeResponseMessage implements INCLifecycleMessage, ICcAddressedMessage {
 
     private static final long serialVersionUID = 1L;
     private final String nodeId;
@@ -37,8 +38,8 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+    public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
     }
 
     @Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsRequestMessage.java
index 09bb051..ea9ac55 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsRequestMessage.java
@@ -22,15 +22,15 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.asterix.common.replication.INCLifecycleMessage;
 import org.apache.asterix.common.replication.IRemoteRecoveryManager;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 
-public class TakeoverPartitionsRequestMessage implements INCLifecycleMessage {
+public class TakeoverPartitionsRequestMessage implements INCLifecycleMessage, INcAddressedMessage {
 
     private static final long serialVersionUID = 1L;
     private static final Logger LOGGER = Logger.getLogger(TakeoverPartitionsRequestMessage.class.getName());
@@ -72,9 +72,8 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
-        INCMessageBroker broker = (INCMessageBroker) cs.getContext().getMessageBroker();
+    public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
+        INCMessageBroker broker = (INCMessageBroker) appContext.getServiceContext().getMessageBroker();
         //if the NC is shutting down, it should ignore takeover partitions request
         if (!appContext.isShuttingdown()) {
             HyracksDataException hde = null;
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsResponseMessage.java
index e653a64..d9484f9 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsResponseMessage.java
@@ -18,12 +18,13 @@
  */
 package org.apache.asterix.app.replication.message;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
 import org.apache.asterix.common.replication.INCLifecycleMessage;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 
-public class TakeoverPartitionsResponseMessage implements INCLifecycleMessage {
+public class TakeoverPartitionsResponseMessage implements INCLifecycleMessage, ICcAddressedMessage {
 
     private static final long serialVersionUID = 1L;
     private final Integer[] partitions;
@@ -49,8 +50,8 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+    public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
     }
 
     @Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultPrinter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultPrinter.java
index c77ad44..7ed3aef 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultPrinter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultPrinter.java
@@ -24,6 +24,7 @@
 import java.io.StringWriter;
 import java.nio.ByteBuffer;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.translator.IStatementExecutor.Stats;
 import org.apache.asterix.translator.SessionConfig;
@@ -44,8 +45,7 @@
 
 public class ResultPrinter {
 
-    // TODO(tillw): Should this be static?
-    private static FrameManager resultDisplayFrameMgr = new FrameManager(ResultReader.FRAME_SIZE);
+    private final FrameManager resultDisplayFrameMgr;
 
     private final SessionConfig conf;
     private final Stats stats;
@@ -62,12 +62,13 @@
     private ObjectMapper om;
     private ObjectWriter ow;
 
-    public ResultPrinter(SessionConfig conf, Stats stats, ARecordType recordType) {
+    public ResultPrinter(ICcApplicationContext appCtx, SessionConfig conf, Stats stats, ARecordType recordType) {
         this.conf = conf;
         this.stats = stats;
         this.recordType = recordType;
         this.indentJSON = conf.is(SessionConfig.FORMAT_INDENT_JSON);
         this.quoteRecord = conf.is(SessionConfig.FORMAT_QUOTE_RECORD);
+        this.resultDisplayFrameMgr = new FrameManager(appCtx.getCompilerProperties().getFrameSize());
         if (indentJSON) {
             this.om = new ObjectMapper();
             DefaultPrettyPrinter.Indenter i = new DefaultPrettyPrinter.Indenter() {
@@ -164,8 +165,8 @@
             // TODO(tillw): this is inefficient - do this during record generation
             try {
                 record = ow.writeValueAsString(om.readValue(result, Object.class));
-            } catch (IOException e) {
-                throw new HyracksDataException(e);
+            } catch (IOException e) { // NOSONAR if JSON parsing fails, just use the original string
+                record = result;
             }
         }
         if (conf.fmt() == SessionConfig.OutputFormat.CSV) {
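With the singleton gone, ResultPrinter sizes its own FrameManager from the compiler properties of the context it is handed, so the frame size becomes per-application state rather than a JVM-wide static (the companion ResultReader change below deletes the FRAME_SIZE constant for the same reason). A usage sketch of the new constructor; only the wrapper names are invented:

import org.apache.asterix.app.result.ResultPrinter;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.translator.IStatementExecutor.Stats;
import org.apache.asterix.translator.SessionConfig;

final class ResultPrinterUsageSketch {

    private ResultPrinterUsageSketch() {
    }

    static ResultPrinter newPrinter(ICcApplicationContext appCtx, SessionConfig conf, Stats stats,
            ARecordType recordType) {
        // Internally the printer calls
        // appCtx.getCompilerProperties().getFrameSize() to size its frames.
        return new ResultPrinter(appCtx, conf, stats, recordType);
    }
}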
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultReader.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultReader.java
index 1871476..eeb01ba 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultReader.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultReader.java
@@ -18,7 +18,6 @@
  */
 package org.apache.asterix.app.result;
 
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.IFrameTupleAccessor;
 import org.apache.hyracks.api.dataset.DatasetJobRecord.Status;
@@ -37,8 +36,6 @@
     // Number of parallel result reader buffers
     public static final int NUM_READERS = 1;
 
-    public static final int FRAME_SIZE = AppContextInfo.INSTANCE.getCompilerProperties().getFrameSize();
-
     public ResultReader(IHyracksDataset hdc, JobId jobId, ResultSetId resultSetId) throws HyracksDataException {
         reader = hdc.createReader(jobId, resultSetId);
         frameTupleAccessor = new ResultFrameTupleAccessor();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/DefaultStatementExecutorFactory.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/DefaultStatementExecutorFactory.java
index 15ed1b4..e2f17ac 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/DefaultStatementExecutorFactory.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/DefaultStatementExecutorFactory.java
@@ -23,6 +23,7 @@
 import java.util.concurrent.Executors;
 
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.translator.IStatementExecutor;
@@ -47,8 +48,9 @@
     }
 
     @Override
-    public IStatementExecutor create(List<Statement> statements, SessionConfig conf,
+    public IStatementExecutor create(ICcApplicationContext appCtx, List<Statement> statements, SessionConfig conf,
             ILangCompilationProvider compilationProvider, IStorageComponentProvider storageComponentProvider) {
-        return new QueryTranslator(statements, conf, compilationProvider, storageComponentProvider, executorService);
+        return new QueryTranslator(appCtx, statements, conf, compilationProvider, storageComponentProvider,
+                executorService);
     }
 }
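Threading ICcApplicationContext through IStatementExecutor creation is what lets QueryTranslator (next file) drop its AppContextInfo.INSTANCE lookups. A sketch of the call site, with invented wrapper names:

import java.util.List;

import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.translator.IStatementExecutor;
import org.apache.asterix.translator.SessionConfig;

final class ExecutorFactoryUsageSketch {

    private ExecutorFactoryUsageSketch() {
    }

    static IStatementExecutor newExecutor(ICcApplicationContext appCtx, List<Statement> statements,
            SessionConfig conf, ILangCompilationProvider compilationProvider,
            IStorageComponentProvider storageComponentProvider) {
        // appCtx flows into the QueryTranslator the factory builds, replacing
        // the former singleton access inside the translator.
        return new DefaultStatementExecutorFactory().create(appCtx, statements, conf, compilationProvider,
                storageComponentProvider);
    }
}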
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index cb59f5c..e97a7e1 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -39,6 +39,7 @@
 import java.util.logging.Logger;
 
 import org.apache.asterix.active.ActiveJobNotificationHandler;
+import org.apache.asterix.active.ActiveLifecycleListener;
 import org.apache.asterix.active.ActivityState;
 import org.apache.asterix.active.EntityId;
 import org.apache.asterix.active.IActiveEntityEventsListener;
@@ -58,6 +59,7 @@
 import org.apache.asterix.common.config.ExternalProperties;
 import org.apache.asterix.common.config.GlobalConfig;
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
@@ -149,7 +151,6 @@
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.types.TypeSignature;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.transaction.management.service.transaction.DatasetIdFactory;
 import org.apache.asterix.translator.AbstractLangTranslator;
 import org.apache.asterix.translator.CompiledStatements.CompiledDeleteStatement;
@@ -201,6 +202,7 @@
 
     public static final boolean IS_DEBUG_MODE = false;// true
     protected final List<Statement> statements;
+    protected final ICcApplicationContext appCtx;
     protected final SessionConfig sessionConfig;
     protected Dataverse activeDataverse;
     protected final List<FunctionDecl> declaredFunctions;
@@ -209,8 +211,10 @@
     protected final IStorageComponentProvider componentProvider;
     protected final ExecutorService executorService;
 
-    public QueryTranslator(List<Statement> statements, SessionConfig conf, ILangCompilationProvider compliationProvider,
-            IStorageComponentProvider componentProvider, ExecutorService executorService) {
+    public QueryTranslator(ICcApplicationContext appCtx, List<Statement> statements, SessionConfig conf,
+            ILangCompilationProvider compliationProvider, IStorageComponentProvider componentProvider,
+            ExecutorService executorService) {
+        this.appCtx = appCtx;
         this.statements = statements;
         this.sessionConfig = conf;
         this.componentProvider = componentProvider;
@@ -260,9 +264,9 @@
                 if (sessionConfig.is(SessionConfig.FORMAT_HTML)) {
                     sessionConfig.out().println(ApiServlet.HTML_STATEMENT_SEPARATOR);
                 }
-                validateOperation(activeDataverse, stmt);
+                validateOperation(appCtx, activeDataverse, stmt);
                 rewriteStatement(stmt); // Rewrite the statement's AST.
-                MetadataProvider metadataProvider = new MetadataProvider(activeDataverse, componentProvider);
+                MetadataProvider metadataProvider = new MetadataProvider(appCtx, activeDataverse, componentProvider);
                 metadataProvider.setWriterFactory(writerFactory);
                 metadataProvider.setResultSerializerFactoryProvider(resultSerializerFactoryProvider);
                 metadataProvider.setOutputFile(outputFile);
@@ -533,7 +537,7 @@
                 throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
             }
             String ngName = ngNameId != null ? ngNameId.getValue()
-                    : configureNodegroupForDataset(dataverseName, datasetName, dd.getHints(), mdTxnCtx);
+                    : configureNodegroupForDataset(appCtx, dd.getHints(), dataverseName, datasetName, mdTxnCtx);
 
             if (compactionPolicy == null) {
                 compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
@@ -677,7 +681,9 @@
 
     protected void validateIfResourceIsActiveInFeed(Dataset dataset) throws CompilationException {
         StringBuilder builder = null;
-        IActiveEntityEventsListener[] listeners = ActiveJobNotificationHandler.INSTANCE.getEventListeners();
+        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
+        IActiveEntityEventsListener[] listeners = activeEventHandler.getEventListeners();
         for (IActiveEntityEventsListener listener : listeners) {
             if (listener.isEntityUsingDataset(dataset)) {
                 if (builder == null) {
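The lookup chain used here, appCtx.getActiveLifecycleListener() to ActiveLifecycleListener to its ActiveJobNotificationHandler, replaces the ActiveJobNotificationHandler.INSTANCE singleton and recurs in several hunks below (dataverse drop, dataset drop, feed drop and start). A small sketch of the idiom as a hypothetical helper, built only from calls visible in this file:

import org.apache.asterix.active.ActiveJobNotificationHandler;
import org.apache.asterix.active.ActiveLifecycleListener;
import org.apache.asterix.active.IActiveEntityEventsListener;
import org.apache.asterix.common.dataflow.ICcApplicationContext;

final class ActiveListenerLookupSketch {

    private ActiveListenerLookupSketch() {
    }

    // Hypothetical helper: the per-application notification handler replaces
    // the removed ActiveJobNotificationHandler.INSTANCE singleton.
    static IActiveEntityEventsListener[] eventListeners(ICcApplicationContext appCtx) {
        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
        ActiveJobNotificationHandler handler = activeListener.getNotificationHandler();
        return handler.getEventListeners();
    }
}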
@@ -704,8 +710,8 @@
         }
     }
 
-    protected static String configureNodegroupForDataset(String dataverseName, String datasetName,
-            Map<String, String> hints, MetadataTransactionContext mdTxnCtx) throws CompilationException {
+    protected static String configureNodegroupForDataset(ICcApplicationContext appCtx, Map<String, String> hints,
+            String dataverseName, String datasetName, MetadataTransactionContext mdTxnCtx) throws CompilationException {
         int nodegroupCardinality;
         String nodegroupName;
         String hintValue = hints.get(DatasetNodegroupCardinalityHint.NAME);
@@ -714,16 +720,16 @@
             return nodegroupName;
         } else {
             int numChosen = 0;
-            boolean valid = DatasetHints.validate(DatasetNodegroupCardinalityHint.NAME,
+            boolean valid = DatasetHints.validate(appCtx, DatasetNodegroupCardinalityHint.NAME,
                     hints.get(DatasetNodegroupCardinalityHint.NAME)).first;
             if (!valid) {
                 throw new CompilationException("Incorrect use of hint:" + DatasetNodegroupCardinalityHint.NAME);
             } else {
                 nodegroupCardinality = Integer.parseInt(hints.get(DatasetNodegroupCardinalityHint.NAME));
             }
-            List<String> nodeNames = AppContextInfo.INSTANCE.getMetadataProperties().getNodeNames();
+            List<String> nodeNames = appCtx.getMetadataProperties().getNodeNames();
             List<String> nodeNamesClone = new ArrayList<>(nodeNames);
-            String metadataNodeName = AppContextInfo.INSTANCE.getMetadataProperties().getMetadataNodeName();
+            String metadataNodeName = appCtx.getMetadataProperties().getMetadataNodeName();
             List<String> selectedNodes = new ArrayList<>();
             selectedNodes.add(metadataNodeName);
             numChosen++;
@@ -1168,9 +1174,11 @@
                 }
             }
             // # disconnect all feeds from any datasets in the dataverse.
-            IActiveEntityEventsListener[] activeListeners = ActiveJobNotificationHandler.INSTANCE.getEventListeners();
+            ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+            ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
+            IActiveEntityEventsListener[] activeListeners = activeEventHandler.getEventListeners();
             Identifier dvId = new Identifier(dataverseName);
-            MetadataProvider tempMdProvider = new MetadataProvider(metadataProvider.getDefaultDataverse(),
+            MetadataProvider tempMdProvider = new MetadataProvider(appCtx, metadataProvider.getDefaultDataverse(),
                     metadataProvider.getStorageComponentProvider());
             tempMdProvider.setConfig(metadataProvider.getConfig());
             for (IActiveEntityEventsListener listener : activeListeners) {
@@ -1181,7 +1189,7 @@
                     stopFeedBeforeDelete(new Pair<>(dvId, new Identifier(activeEntityId.getEntityName())),
                             tempMdProvider);
                     // prepare job to remove feed log storage
-                    jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(
+                    jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(metadataProvider,
                             MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, activeEntityId.getEntityName())));
                 }
             }
@@ -1395,7 +1403,9 @@
                 throw new AlgebricksException(
                         "There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
             }
-            IActiveEntityEventsListener[] listeners = ActiveJobNotificationHandler.INSTANCE.getEventListeners();
+            ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+            ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
+            IActiveEntityEventsListener[] listeners = activeEventHandler.getEventListeners();
             StringBuilder builder = null;
             for (IActiveEntityEventsListener listener : listeners) {
                 if (listener.isEntityUsingDataset(ds)) {
@@ -1860,7 +1870,7 @@
             }
             String adaptorName = cfs.getAdaptorName();
             feed = new Feed(dataverseName, feedName, adaptorName, cfs.getAdaptorConfiguration());
-            FeedMetadataUtil.validateFeed(feed, mdTxnCtx, metadataProvider.getLibraryManager());
+            FeedMetadataUtil.validateFeed(feed, mdTxnCtx, appCtx);
             MetadataManager.INSTANCE.addFeed(metadataProvider.getMetadataTxnContext(), feed);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
@@ -1954,13 +1964,14 @@
             }
 
             EntityId feedId = new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName);
-            FeedEventsListener listener =
-                    (FeedEventsListener) ActiveJobNotificationHandler.INSTANCE.getActiveEntityListener(feedId);
+            ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+            ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
+            FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getActiveEntityListener(feedId);
             if (listener != null) {
                 throw new AlgebricksException("Feed " + feedId
                         + " is currently active and connected to the following dataset(s) \n" + listener.toString());
             } else {
-                JobSpecification spec = FeedOperations.buildRemoveFeedStorageJob(
+                JobSpecification spec = FeedOperations.buildRemoveFeedStorageJob(metadataProvider,
                         MetadataManager.INSTANCE.getFeed(mdTxnCtx, feedId.getDataverse(), feedId.getEntityName()));
                 JobUtils.runJob(hcc, spec, true);
                 MetadataManager.INSTANCE.dropFeed(mdTxnCtx, dataverseName, feedName);
@@ -2024,8 +2035,9 @@
         ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
         IStorageComponentProvider storageComponentProvider = new StorageComponentProvider();
         DefaultStatementExecutorFactory qtFactory = new DefaultStatementExecutorFactory();
-        FeedEventsListener listener =
-                (FeedEventsListener) ActiveJobNotificationHandler.INSTANCE.getActiveEntityListener(entityId);
+        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
+        FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getActiveEntityListener(entityId);
         if (listener != null) {
             throw new AlgebricksException("Feed " + feedName + " is started already.");
         }
@@ -2044,8 +2056,8 @@
                             compilationProvider, storageComponentProvider, qtFactory, hcc);
 
             JobSpecification feedJob = jobInfo.getLeft();
-            listener = new FeedEventsListener(entityId, datasets, jobInfo.getRight().getLocations());
-            ActiveJobNotificationHandler.INSTANCE.registerListener(listener);
+            listener = new FeedEventsListener(appCtx, entityId, datasets, jobInfo.getRight().getLocations());
+            activeEventHandler.registerListener(listener);
             IActiveEventSubscriber eventSubscriber = listener.subscribe(ActivityState.STARTED);
             feedJob.setProperty(ActiveJobNotificationHandler.ACTIVE_ENTITY_PROPERTY_NAME, entityId);
             JobUtils.runJob(hcc, feedJob,
@@ -2055,7 +2067,7 @@
         } catch (Exception e) {
             abort(e, e, mdTxnCtx);
             if (listener != null) {
-                ActiveJobNotificationHandler.INSTANCE.unregisterListener(listener);
+                activeEventHandler.unregisterListener(listener);
             }
             throw e;
         } finally {
@@ -2068,9 +2080,10 @@
         String dataverseName = getActiveDataverse(sfst.getDataverseName());
         String feedName = sfst.getFeedName().getValue();
         EntityId feedId = new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName);
+        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
         // Obtain runtime info from ActiveListener
-        FeedEventsListener listener =
-                (FeedEventsListener) ActiveJobNotificationHandler.INSTANCE.getActiveEntityListener(feedId);
+        FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getActiveEntityListener(feedId);
         if (listener == null) {
             throw new AlgebricksException("Feed " + feedName + " is not started.");
         }
@@ -2085,7 +2098,7 @@
             // Construct ActiveMessage
             for (int i = 0; i < listener.getSources().length; i++) {
                 String intakeLocation = listener.getSources()[i];
-                FeedOperations.SendStopMessageToNode(feedId, intakeLocation, i);
+                FeedOperations.SendStopMessageToNode(appCtx, feedId, intakeLocation, i);
             }
             eventSubscriber.sync();
         } catch (Exception e) {
@@ -2106,7 +2119,9 @@
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
         // Check whether feed is alive
-        if (ActiveJobNotificationHandler.INSTANCE
+        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
+        if (activeEventHandler
                 .getActiveEntityListener(new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName)) != null) {
             throw new CompilationException(ErrorCode.FEED_CHANGE_FEED_CONNECTIVITY_ON_ALIVE_FEED, feedName);
         }
@@ -2145,8 +2160,10 @@
         String feedName = cfs.getFeedName().getValue();
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
         // Check whether feed is alive
-        if (ActiveJobNotificationHandler.INSTANCE
+        if (activeEventHandler
                 .getActiveEntityListener(new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName)) != null) {
             throw new CompilationException(ErrorCode.FEED_CHANGE_FEED_CONNECTIVITY_ON_ALIVE_FEED, feedName);
         }
@@ -2322,7 +2339,7 @@
             case IMMEDIATE:
                 createAndRunJob(hcc, null, compiler, locker, resultDelivery, id -> {
                     final ResultReader resultReader = new ResultReader(hdc, id, resultSetId);
-                    ResultUtil.printResults(resultReader, sessionConfig, stats,
+                    ResultUtil.printResults(appCtx, resultReader, sessionConfig, stats,
                             metadataProvider.findOutputRecordType());
                 }, clientContextId, ctx);
                 break;
@@ -2698,7 +2715,7 @@
             if (pregelixHome == null) {
                 // Since there is a default value for PREGELIX_HOME in CompilerProperties,
                 // pregelixHome can never be null.
-                pregelixHome = AppContextInfo.INSTANCE.getCompilerProperties().getPregelixHome();
+                pregelixHome = appCtx.getCompilerProperties().getPregelixHome();
             }
 
             // Constructs the pregelix command line.
@@ -2819,7 +2836,7 @@
     protected List<String> constructPregelixCommand(RunStatement pregelixStmt, String fromDataverseName,
             String fromDatasetName, String toDataverseName, String toDatasetName) {
         // Constructs AsterixDB parameters, e.g., URL, source dataset and sink dataset.
-        ExternalProperties externalProperties = AppContextInfo.INSTANCE.getExternalProperties();
+        ExternalProperties externalProperties = appCtx.getExternalProperties();
         String clientIP = ClusterProperties.INSTANCE.getCluster().getMasterNode().getClientIp();
         StringBuilder asterixdbParameterBuilder = new StringBuilder();
         asterixdbParameterBuilder.append(
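
Every QueryTranslator change above is the same mechanical substitution: a static singleton lookup (AppContextInfo.INSTANCE, ActiveJobNotificationHandler.INSTANCE) becomes a call through the ICcApplicationContext handed in at construction time. A minimal sketch of the pattern, assuming only the getters visible in this diff ("Consumer" is a hypothetical stand-in, not a class in the patch):

    // Hypothetical consumer of the injected context; everything except the
    // ICcApplicationContext getters is illustrative only.
    public class Consumer {
        private final ICcApplicationContext appCtx;

        public Consumer(ICcApplicationContext appCtx) {
            this.appCtx = appCtx; // injected once, used instead of global state
        }

        public String pregelixHome() {
            // before: AppContextInfo.INSTANCE.getCompilerProperties().getPregelixHome()
            return appCtx.getCompilerProperties().getPregelixHome();
        }

        public ActiveJobNotificationHandler eventHandler() {
            // before: ActiveJobNotificationHandler.INSTANCE
            ActiveLifecycleListener listener =
                    (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
            return listener.getNotificationHandler();
        }
    }

The payoff is testability: callers can supply a mock context rather than mutating process-wide singletons.
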
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixCLI.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixCLI.java
index d4f2129..c9b3565 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixCLI.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixCLI.java
@@ -27,6 +27,7 @@
 import org.apache.asterix.api.java.AsterixJavaClient;
 import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
 import org.apache.asterix.common.config.GlobalConfig;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.compiler.provider.AqlCompilationProvider;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.file.StorageComponentProvider;
@@ -58,7 +59,9 @@
         try {
             for (String queryFile : options.args) {
                 Reader in = new FileReader(queryFile);
-                AsterixJavaClient ajc = new AsterixJavaClient(integrationUtil.getHyracksClientConnection(), in,
+                AsterixJavaClient ajc =
+                        new AsterixJavaClient((ICcApplicationContext) integrationUtil.cc.getApplicationContext(),
+                        integrationUtil.getHyracksClientConnection(), in,
                         compilationProvider, new DefaultStatementExecutorFactory(), new StorageComponentProvider());
                 try {
                     ajc.compile(true, false, false, false, false, true, false);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java
index f44aeb9..ba44833 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java
@@ -64,7 +64,7 @@
             boolean onlyPhysical, boolean createBinaryRuntime) throws Exception {
         ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
         FileReader reader = new FileReader(filename);
-        AsterixJavaClient q = new AsterixJavaClient(hcc, reader, compilationProvider,
+        AsterixJavaClient q = new AsterixJavaClient(null, hcc, reader, compilationProvider,
                 new DefaultStatementExecutorFactory(), new StorageComponentProvider());
         q.compile(optimize, true, true, true, onlyPhysical, createBinaryRuntime, createBinaryRuntime);
         return q;
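
AsterixJavaClient now takes the application context as its first constructor argument. A caller with a running cluster controller fetches the context from the service, as AsterixCLI does above; the standalone AsterixClientDriver has no CC and passes null. A usage sketch mirroring the two call sites in this diff (integrationUtil, hcc, reader, and compilationProvider as in the surrounding code):

    // With a live CC (AsterixCLI case):
    ICcApplicationContext ctx =
            (ICcApplicationContext) integrationUtil.cc.getApplicationContext();
    AsterixJavaClient client = new AsterixJavaClient(ctx, hcc, reader, compilationProvider,
            new DefaultStatementExecutorFactory(), new StorageComponentProvider());

    // Compile-only, no CC available (AsterixClientDriver case):
    AsterixJavaClient offline = new AsterixJavaClient(null, hcc, reader, compilationProvider,
            new DefaultStatementExecutorFactory(), new StorageComponentProvider());
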
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
index 578c206..9c66f57 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
@@ -60,7 +60,6 @@
 import org.apache.asterix.common.config.MetadataProperties;
 import org.apache.asterix.common.context.IStorageComponentProvider;
 import org.apache.asterix.common.library.ILibraryManager;
-import org.apache.asterix.common.messaging.api.ICCMessageBroker;
 import org.apache.asterix.common.replication.IFaultToleranceStrategy;
 import org.apache.asterix.common.replication.IReplicationStrategy;
 import org.apache.asterix.common.utils.Servlets;
@@ -72,7 +71,7 @@
 import org.apache.asterix.metadata.bootstrap.AsterixStateProxy;
 import org.apache.asterix.metadata.cluster.ClusterManagerProvider;
 import org.apache.asterix.runtime.job.resource.JobCapacityController;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
 import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.asterix.translator.IStatementExecutorFactory;
 import org.apache.hyracks.api.application.ICCServiceContext;
@@ -96,8 +95,9 @@
     protected ICCServiceContext ccServiceCtx;
     protected CCExtensionManager ccExtensionManager;
     protected IStorageComponentProvider componentProvider;
-    private IJobCapacityController jobCapacityController;
     protected WebManager webManager;
+    protected CcApplicationContext appCtx;
+    private IJobCapacityController jobCapacityController;
 
     @Override
     public void start(IServiceContext serviceCtx, String[] args) throws Exception {
@@ -105,8 +105,8 @@
             throw new IllegalArgumentException("Unrecognized argument(s): " + Arrays.toString(args));
         }
         final ClusterControllerService controllerService = (ClusterControllerService) serviceCtx.getControllerService();
-        ICCMessageBroker messageBroker = new CCMessageBroker(controllerService);
         this.ccServiceCtx = (ICCServiceContext) serviceCtx;
+        ccServiceCtx.setMessageBroker(new CCMessageBroker(controllerService));
 
         configureLoggingLevel(ccServiceCtx.getAppConfig().getLoggingLevel(ExternalProperties.Option.LOG_LEVEL));
 
@@ -120,34 +120,33 @@
         ResourceIdManager resourceIdManager = new ResourceIdManager();
         IReplicationStrategy repStrategy = ClusterProperties.INSTANCE.getReplicationStrategy();
         IFaultToleranceStrategy ftStrategy = FaultToleranceStrategyFactory
-                .create(ClusterProperties.INSTANCE.getCluster(), repStrategy, messageBroker);
+                .create(ClusterProperties.INSTANCE.getCluster(), repStrategy, ccServiceCtx);
         ExternalLibraryUtils.setUpExternaLibraries(libraryManager, false);
         componentProvider = new StorageComponentProvider();
         GlobalRecoveryManager.instantiate((HyracksConnection) getHcc(), componentProvider);
-        AppContextInfo.initialize(ccServiceCtx, getHcc(), libraryManager, resourceIdManager,
-                () -> MetadataManager.INSTANCE, GlobalRecoveryManager.instance(), ftStrategy);
+        appCtx = new CcApplicationContext(ccServiceCtx, getHcc(), libraryManager, resourceIdManager,
+                () -> MetadataManager.INSTANCE, GlobalRecoveryManager.instance(), ftStrategy,
+                new ActiveLifecycleListener());
+        ClusterStateManager.INSTANCE.setCcAppCtx(appCtx);
         ccExtensionManager = new CCExtensionManager(getExtensions());
-        AppContextInfo.INSTANCE.setExtensionManager(ccExtensionManager);
+        appCtx.setExtensionManager(ccExtensionManager);
         final CCConfig ccConfig = controllerService.getCCConfig();
         if (System.getProperty("java.rmi.server.hostname") == null) {
             System.setProperty("java.rmi.server.hostname", ccConfig.getClusterListenAddress());
         }
-        MetadataProperties metadataProperties = AppContextInfo.INSTANCE.getMetadataProperties();
+        MetadataProperties metadataProperties = appCtx.getMetadataProperties();
 
         setAsterixStateProxy(AsterixStateProxy.registerRemoteObject(metadataProperties.getMetadataCallbackPort()));
         ccServiceCtx.setDistributedState(proxy);
-
         MetadataManager.initialize(proxy, metadataProperties);
-
-        AppContextInfo.INSTANCE.getCCServiceContext().addJobLifecycleListener(ActiveLifecycleListener.INSTANCE);
+        ccServiceCtx.addJobLifecycleListener(appCtx.getActiveLifecycleListener());
 
         // create event loop groups
         webManager = new WebManager();
         configureServers();
         webManager.start();
         ClusterManagerProvider.getClusterManager().registerSubscriber(GlobalRecoveryManager.instance());
-        ccServiceCtx.addClusterLifecycleListener(ClusterLifecycleListener.INSTANCE);
-        ccServiceCtx.setMessageBroker(messageBroker);
+        ccServiceCtx.addClusterLifecycleListener(new ClusterLifecycleListener(appCtx));
 
         jobCapacityController = new JobCapacityController(controllerService.getResourceManager());
     }
@@ -160,18 +159,18 @@
     }
 
     protected List<AsterixExtension> getExtensions() {
-        return AppContextInfo.INSTANCE.getExtensionProperties().getExtensions();
+        return appCtx.getExtensionProperties().getExtensions();
     }
 
     protected void configureServers() throws Exception {
-        webManager.add(setupWebServer(AppContextInfo.INSTANCE.getExternalProperties()));
-        webManager.add(setupJSONAPIServer(AppContextInfo.INSTANCE.getExternalProperties()));
-        webManager.add(setupQueryWebServer(AppContextInfo.INSTANCE.getExternalProperties()));
+        webManager.add(setupWebServer(appCtx.getExternalProperties()));
+        webManager.add(setupJSONAPIServer(appCtx.getExternalProperties()));
+        webManager.add(setupQueryWebServer(appCtx.getExternalProperties()));
     }
 
     @Override
     public void stop() throws Exception {
-        ActiveLifecycleListener.INSTANCE.stop();
+        ((ActiveLifecycleListener) appCtx.getActiveLifecycleListener()).stop();
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Stopping Asterix cluster controller");
         }
@@ -184,7 +183,7 @@
                 externalProperties.getWebInterfacePort());
         IHyracksClientConnection hcc = getHcc();
         webServer.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
-        webServer.addServlet(new ApiServlet(webServer.ctx(), new String[] { "/*" },
+        webServer.addServlet(new ApiServlet(webServer.ctx(), new String[] { "/*" }, appCtx,
                 ccExtensionManager.getAqlCompilationProvider(), ccExtensionManager.getSqlppCompilationProvider(),
                 getStatementExecutorFactory(), componentProvider));
         return webServer;
@@ -195,7 +194,7 @@
                 new HttpServer(webManager.getBosses(), webManager.getWorkers(), externalProperties.getAPIServerPort());
         IHyracksClientConnection hcc = getHcc();
         jsonAPIServer.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
-        jsonAPIServer.setAttribute(ASTERIX_APP_CONTEXT_INFO_ATTR, AppContextInfo.INSTANCE);
+        jsonAPIServer.setAttribute(ASTERIX_APP_CONTEXT_INFO_ATTR, appCtx);
         jsonAPIServer.setAttribute(ServletConstants.EXECUTOR_SERVICE_ATTR,
                 ccServiceCtx.getControllerService().getExecutor());
 
@@ -235,47 +234,47 @@
                 externalProperties.getQueryWebInterfacePort());
         IHyracksClientConnection hcc = getHcc();
         queryWebServer.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
-        queryWebServer.addServlet(new QueryWebInterfaceServlet(queryWebServer.ctx(), new String[] { "/*" }));
+        queryWebServer.addServlet(new QueryWebInterfaceServlet(appCtx, queryWebServer.ctx(), new String[] { "/*" }));
         return queryWebServer;
     }
 
     protected IServlet createServlet(ConcurrentMap<String, Object> ctx, String key, String... paths) {
         switch (key) {
             case Servlets.AQL:
-                return new FullApiServlet(ctx, paths, ccExtensionManager.getAqlCompilationProvider(),
+                return new FullApiServlet(ctx, paths, appCtx, ccExtensionManager.getAqlCompilationProvider(),
                         getStatementExecutorFactory(), componentProvider);
             case Servlets.AQL_QUERY:
-                return new QueryApiServlet(ctx, paths, ccExtensionManager.getAqlCompilationProvider(),
+                return new QueryApiServlet(ctx, paths, appCtx, ccExtensionManager.getAqlCompilationProvider(),
                         getStatementExecutorFactory(), componentProvider);
             case Servlets.AQL_UPDATE:
-                return new UpdateApiServlet(ctx, paths, ccExtensionManager.getAqlCompilationProvider(),
+                return new UpdateApiServlet(ctx, paths, appCtx, ccExtensionManager.getAqlCompilationProvider(),
                         getStatementExecutorFactory(), componentProvider);
             case Servlets.AQL_DDL:
-                return new DdlApiServlet(ctx, paths, ccExtensionManager.getAqlCompilationProvider(),
+                return new DdlApiServlet(ctx, paths, appCtx, ccExtensionManager.getAqlCompilationProvider(),
                         getStatementExecutorFactory(), componentProvider);
             case Servlets.SQLPP:
-                return new FullApiServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
+                return new FullApiServlet(ctx, paths, appCtx, ccExtensionManager.getSqlppCompilationProvider(),
                         getStatementExecutorFactory(), componentProvider);
             case Servlets.SQLPP_QUERY:
-                return new QueryApiServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
+                return new QueryApiServlet(ctx, paths, appCtx, ccExtensionManager.getSqlppCompilationProvider(),
                         getStatementExecutorFactory(), componentProvider);
             case Servlets.SQLPP_UPDATE:
-                return new UpdateApiServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
+                return new UpdateApiServlet(ctx, paths, appCtx, ccExtensionManager.getSqlppCompilationProvider(),
                         getStatementExecutorFactory(), componentProvider);
             case Servlets.SQLPP_DDL:
-                return new DdlApiServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
+                return new DdlApiServlet(ctx, paths, appCtx, ccExtensionManager.getSqlppCompilationProvider(),
                         getStatementExecutorFactory(), componentProvider);
             case Servlets.RUNNING_REQUESTS:
                 return new QueryCancellationServlet(ctx, paths);
             case Servlets.QUERY_STATUS:
-                return new QueryStatusApiServlet(ctx, paths);
+                return new QueryStatusApiServlet(ctx, paths, appCtx);
             case Servlets.QUERY_RESULT:
-                return new QueryResultApiServlet(ctx, paths);
+                return new QueryResultApiServlet(ctx, paths, appCtx);
             case Servlets.QUERY_SERVICE:
-                return new QueryServiceServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
+                return new QueryServiceServlet(ctx, paths, appCtx, ccExtensionManager.getSqlppCompilationProvider(),
                         getStatementExecutorFactory(), componentProvider);
             case Servlets.CONNECTOR:
-                return new ConnectorApiServlet(ctx, paths);
+                return new ConnectorApiServlet(ctx, paths, appCtx);
             case Servlets.SHUTDOWN:
                 return new ShutdownApiServlet(ctx, paths);
             case Servlets.VERSION:
@@ -287,7 +286,7 @@
             case Servlets.CLUSTER_STATE_CC_DETAIL:
                 return new ClusterControllerDetailsApiServlet(ctx, paths);
             case Servlets.DIAGNOSTICS:
-                return new DiagnosticsApiServlet(ctx, paths);
+                return new DiagnosticsApiServlet(ctx, paths, appCtx);
             default:
                 throw new IllegalStateException(String.valueOf(key));
         }
@@ -299,7 +298,7 @@
 
     @Override
     public void startupCompleted() throws Exception {
-        ccServiceCtx.getControllerService().getExecutor().submit((Callable)() -> {
+        ccServiceCtx.getControllerService().getExecutor().submit((Callable) () -> {
             ClusterStateManager.INSTANCE.waitForState(ClusterState.ACTIVE);
             ClusterManagerProvider.getClusterManager().notifyStartupCompleted();
             return null;
@@ -322,8 +321,8 @@
     }
 
     @Override
-    public AppContextInfo getApplicationContext() {
-        return AppContextInfo.INSTANCE;
+    public CcApplicationContext getApplicationContext() {
+        return appCtx;
     }
 
     protected IHyracksClientConnection getHcc() throws Exception {
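
The CCApplication changes replace the AppContextInfo singleton with an explicitly constructed CcApplicationContext and wire every dependent against it. The ordering in start() matters: the message broker is registered on the service context first, because the fault-tolerance strategy is now created from the service context rather than from a broker reference. Condensed from the method above (not complete code):

    ccServiceCtx.setMessageBroker(new CCMessageBroker(controllerService)); // broker first
    appCtx = new CcApplicationContext(ccServiceCtx, getHcc(), libraryManager, resourceIdManager,
            () -> MetadataManager.INSTANCE, GlobalRecoveryManager.instance(), ftStrategy,
            new ActiveLifecycleListener());
    ClusterStateManager.INSTANCE.setCcAppCtx(appCtx);
    ccServiceCtx.addJobLifecycleListener(appCtx.getActiveLifecycleListener());
    ccServiceCtx.addClusterLifecycleListener(new ClusterLifecycleListener(appCtx));
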
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
index 8883504..66f76c5 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
@@ -41,6 +41,7 @@
 import org.apache.asterix.metadata.cluster.ClusterManagerProvider;
 import org.apache.asterix.metadata.cluster.RemoveNodeWork;
 import org.apache.asterix.metadata.cluster.RemoveNodeWorkResponse;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
 import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.hyracks.api.application.IClusterLifecycleListener;
 import org.apache.hyracks.api.config.IOption;
@@ -49,16 +50,14 @@
 public class ClusterLifecycleListener implements IClusterLifecycleListener {
 
     private static final Logger LOGGER = Logger.getLogger(ClusterLifecycleListener.class.getName());
+    private final CcApplicationContext appCtx;
+    private final LinkedBlockingQueue<Set<IClusterManagementWork>> workRequestQueue = new LinkedBlockingQueue<>();
+    private final ClusterWorkExecutor eventHandler;
+    private final List<IClusterManagementWorkResponse> pendingWorkResponses = new ArrayList<>();
 
-    private static final LinkedBlockingQueue<Set<IClusterManagementWork>> workRequestQueue = new LinkedBlockingQueue<Set<IClusterManagementWork>>();
-
-    private static ClusterWorkExecutor eventHandler = new ClusterWorkExecutor(workRequestQueue);
-
-    private static List<IClusterManagementWorkResponse> pendingWorkResponses = new ArrayList<IClusterManagementWorkResponse>();
-
-    public static ClusterLifecycleListener INSTANCE = new ClusterLifecycleListener();
-
-    private ClusterLifecycleListener() {
+    public ClusterLifecycleListener(CcApplicationContext appCtx) {
+        this.appCtx = appCtx;
+        eventHandler = new ClusterWorkExecutor(appCtx, workRequestQueue);
         Thread t = new Thread(eventHandler);
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Starting cluster event handler");
@@ -78,12 +77,12 @@
             MetadataManager.INSTANCE.rebindMetadataNode();
         }
 
-        Set<String> nodeAddition = new HashSet<String>();
+        Set<String> nodeAddition = new HashSet<>();
         nodeAddition.add(nodeId);
         updateProgress(ClusterEventType.NODE_JOIN, nodeAddition);
         Set<IClusterEventsSubscriber> subscribers =
                 ClusterManagerProvider.getClusterManager().getRegisteredClusterEventSubscribers();
-        Set<IClusterManagementWork> work = new HashSet<IClusterManagementWork>();
+        Set<IClusterManagementWork> work = new HashSet<>();
         for (IClusterEventsSubscriber sub : subscribers) {
             Set<IClusterManagementWork> workRequest = sub.notifyNodeJoin(nodeId);
             work.addAll(workRequest);
@@ -110,7 +109,7 @@
         updateProgress(ClusterEventType.NODE_FAILURE, deadNodeIds);
         Set<IClusterEventsSubscriber> subscribers =
                 ClusterManagerProvider.getClusterManager().getRegisteredClusterEventSubscribers();
-        Set<IClusterManagementWork> work = new HashSet<IClusterManagementWork>();
+        Set<IClusterManagementWork> work = new HashSet<>();
         for (IClusterEventsSubscriber sub : subscribers) {
             Set<IClusterManagementWork> workRequest = sub.notifyNodeFailure(deadNodeIds);
             work.addAll(workRequest);
@@ -121,7 +120,7 @@
     }
 
     private void updateProgress(ClusterEventType eventType, Collection<String> nodeIds) {
-        List<IClusterManagementWorkResponse> completedResponses = new ArrayList<IClusterManagementWorkResponse>();
+        List<IClusterManagementWorkResponse> completedResponses = new ArrayList<>();
         boolean isComplete = false;
         for (IClusterManagementWorkResponse resp : pendingWorkResponses) {
             switch (eventType) {
@@ -149,9 +148,9 @@
 
     private void executeWorkSet(Set<IClusterManagementWork> workSet) {
         int nodesToAdd = 0;
-        Set<String> nodesToRemove = new HashSet<String>();
-        Set<AddNodeWork> nodeAdditionRequests = new HashSet<AddNodeWork>();
-        Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<IClusterManagementWork>();
+        Set<String> nodesToRemove = new HashSet<>();
+        Set<AddNodeWork> nodeAdditionRequests = new HashSet<>();
+        Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<>();
         for (IClusterManagementWork w : workSet) {
             switch (w.getClusterManagementWorkType()) {
                 case ADD_NODE:
@@ -163,20 +162,20 @@
                 case REMOVE_NODE:
                     nodesToRemove.addAll(((RemoveNodeWork) w).getNodesToBeRemoved());
                     nodeRemovalRequests.add(w);
-                    RemoveNodeWorkResponse response = new RemoveNodeWorkResponse((RemoveNodeWork) w,
-                            Status.IN_PROGRESS);
+                    RemoveNodeWorkResponse response =
+                            new RemoveNodeWorkResponse((RemoveNodeWork) w, Status.IN_PROGRESS);
                     pendingWorkResponses.add(response);
                     break;
             }
         }
 
-        List<String> addedNodes = new ArrayList<String>();
+        List<String> addedNodes = new ArrayList<>();
         String asterixInstanceName = ClusterProperties.INSTANCE.getCluster().getInstanceName();
         for (int i = 0; i < nodesToAdd; i++) {
             Node node = ClusterStateManager.INSTANCE.getAvailableSubstitutionNode();
             if (node != null) {
                 try {
-                    ClusterManagerProvider.getClusterManager().addNode(node);
+                    ClusterManagerProvider.getClusterManager().addNode(appCtx, node);
                     addedNodes.add(asterixInstanceName + "_" + node.getId());
                     if (LOGGER.isLoggable(Level.INFO)) {
                         LOGGER.info("Added NC at:" + node.getId());
@@ -197,7 +196,7 @@
 
         for (AddNodeWork w : nodeAdditionRequests) {
             int n = w.getNumberOfNodesRequested();
-            List<String> nodesToBeAddedForWork = new ArrayList<String>();
+            List<String> nodesToBeAddedForWork = new ArrayList<>();
             for (int i = 0; i < n && i < addedNodes.size(); i++) {
                 nodesToBeAddedForWork.add(addedNodes.get(i));
             }
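
With its INSTANCE field gone, ClusterLifecycleListener is now a plain object scoped to one CcApplicationContext. Note that the constructor still starts the ClusterWorkExecutor thread itself, so the listener is live the moment construction returns; the only call site in this patch is the wiring in CCApplication.start():

    // The listener is created per context and registered immediately:
    ClusterLifecycleListener listener = new ClusterLifecycleListener(appCtx);
    ccServiceCtx.addClusterLifecycleListener(listener);
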
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
index 71fff3f..46968b4 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
@@ -25,6 +25,7 @@
 import java.util.logging.Logger;
 
 import org.apache.asterix.common.api.IClusterManagementWork;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.event.schema.cluster.Node;
 import org.apache.asterix.metadata.cluster.AddNodeWork;
@@ -36,9 +37,11 @@
 
     private static final Logger LOGGER = Logger.getLogger(ClusterWorkExecutor.class.getName());
 
+    private final ICcApplicationContext appCtx;
     private final LinkedBlockingQueue<Set<IClusterManagementWork>> inbox;
 
-    public ClusterWorkExecutor(LinkedBlockingQueue<Set<IClusterManagementWork>> inbox) {
+    public ClusterWorkExecutor(ICcApplicationContext appCtx, LinkedBlockingQueue<Set<IClusterManagementWork>> inbox) {
+        this.appCtx = appCtx;
         this.inbox = inbox;
     }
 
@@ -48,9 +51,9 @@
             try {
                 Set<IClusterManagementWork> workSet = inbox.take();
                 int nodesToAdd = 0;
-                Set<String> nodesToRemove = new HashSet<String>();
-                Set<IClusterManagementWork> nodeAdditionRequests = new HashSet<IClusterManagementWork>();
-                Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<IClusterManagementWork>();
+                Set<String> nodesToRemove = new HashSet<>();
+                Set<IClusterManagementWork> nodeAdditionRequests = new HashSet<>();
+                Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<>();
                 for (IClusterManagementWork w : workSet) {
                     switch (w.getClusterManagementWorkType()) {
                         case ADD_NODE:
@@ -66,12 +69,12 @@
                     }
                 }
 
-                Set<Node> addedNodes = new HashSet<Node>();
+                Set<Node> addedNodes = new HashSet<>();
                 for (int i = 0; i < nodesToAdd; i++) {
                     Node node = ClusterStateManager.INSTANCE.getAvailableSubstitutionNode();
                     if (node != null) {
                         try {
-                            ClusterManagerProvider.getClusterManager().addNode(node);
+                            ClusterManagerProvider.getClusterManager().addNode(appCtx, node);
                             addedNodes.add(node);
                             if (LOGGER.isLoggable(Level.INFO)) {
                                 LOGGER.info("Added NC at:" + node.getId());
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
index 7bd1e62..722bb78 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
@@ -33,6 +33,7 @@
 import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
 import org.apache.asterix.common.config.DatasetConfig.TransactionState;
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -71,7 +72,6 @@
 
     @Override
     public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId) {
-        startGlobalRecovery();
         return Collections.emptySet();
     }
 
@@ -92,7 +92,7 @@
     }
 
     @Override
-    public void startGlobalRecovery() {
+    public void startGlobalRecovery(ICcApplicationContext appCtx) {
         // perform global recovery if state changed to active
         final ClusterState newState = ClusterStateManager.INSTANCE.getState();
         boolean needToRecover = !newState.equals(state) && (newState == ClusterState.ACTIVE);
@@ -110,7 +110,8 @@
                         List<Dataverse> dataverses = MetadataManager.INSTANCE.getDataverses(mdTxnCtx);
                         for (Dataverse dataverse : dataverses) {
                             if (!dataverse.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
-                                MetadataProvider metadataProvider = new MetadataProvider(dataverse, componentProvider);
+                                MetadataProvider metadataProvider =
+                                        new MetadataProvider(appCtx, dataverse, componentProvider);
                                 try {
                                     List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx,
                                             dataverse.getDataverseName());
@@ -120,11 +121,11 @@
                                             // Get indexes
                                             List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx,
                                                     dataset.getDataverseName(), dataset.getDatasetName());
+                                            // Get the state of the dataset
+                                            ExternalDatasetDetails dsd =
+                                                    (ExternalDatasetDetails) dataset.getDatasetDetails();
+                                            TransactionState datasetState = dsd.getState();
                                             if (!indexes.isEmpty()) {
-                                                // Get the state of the dataset
-                                                ExternalDatasetDetails dsd =
-                                                        (ExternalDatasetDetails) dataset.getDatasetDetails();
-                                                TransactionState datasetState = dsd.getState();
                                                 if (datasetState == TransactionState.BEGIN) {
                                                     List<ExternalFile> files = MetadataManager.INSTANCE
                                                             .getDatasetExternalFiles(mdTxnCtx, dataset);
@@ -135,59 +136,55 @@
                                                             MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                                                         }
                                                     }
-                                                    // 2. clean artifacts in NCs
-                                                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                                                    JobSpecification jobSpec = ExternalIndexingOperations
-                                                            .buildAbortOp(dataset, indexes, metadataProvider);
-                                                    executeHyracksJob(jobSpec);
-                                                    // 3. correct the dataset state
-                                                    ((ExternalDatasetDetails) dataset.getDatasetDetails())
-                                                            .setState(TransactionState.COMMIT);
-                                                    MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
-                                                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                                                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                                                } else if (datasetState == TransactionState.READY_TO_COMMIT) {
-                                                    List<ExternalFile> files = MetadataManager.INSTANCE
-                                                            .getDatasetExternalFiles(mdTxnCtx, dataset);
-                                                    // if ready to commit, roll forward
-                                                    // 1. commit indexes in NCs
-                                                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                                                    JobSpecification jobSpec = ExternalIndexingOperations
-                                                            .buildRecoverOp(dataset, indexes, metadataProvider);
-                                                    executeHyracksJob(jobSpec);
-                                                    // 2. add pending files in metadata
-                                                    for (ExternalFile file : files) {
-                                                        if (file.getPendingOp() == ExternalFilePendingOp.ADD_OP) {
-                                                            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
-                                                            file.setPendingOp(ExternalFilePendingOp.NO_OP);
-                                                            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
-                                                        } else if (file
-                                                                .getPendingOp() == ExternalFilePendingOp.DROP_OP) {
-                                                            // find original file
-                                                            for (ExternalFile originalFile : files) {
-                                                                if (originalFile.getFileName()
-                                                                        .equals(file.getFileName())) {
-                                                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                            file);
-                                                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                            originalFile);
-                                                                    break;
-                                                                }
+                                                }
+                                                // 2. clean artifacts in NCs
+                                                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                                                JobSpecification jobSpec = ExternalIndexingOperations
+                                                        .buildAbortOp(dataset, indexes, metadataProvider);
+                                                executeHyracksJob(jobSpec);
+                                                // 3. correct the dataset state
+                                                ((ExternalDatasetDetails) dataset.getDatasetDetails())
+                                                        .setState(TransactionState.COMMIT);
+                                                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
+                                                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                                                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                                            } else if (datasetState == TransactionState.READY_TO_COMMIT) {
+                                                List<ExternalFile> files = MetadataManager.INSTANCE
+                                                        .getDatasetExternalFiles(mdTxnCtx, dataset);
+                                                // if ready to commit, roll forward
+                                                // 1. commit indexes in NCs
+                                                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                                                JobSpecification jobSpec = ExternalIndexingOperations
+                                                        .buildRecoverOp(dataset, indexes, metadataProvider);
+                                                executeHyracksJob(jobSpec);
+                                                // 2. add pending files in metadata
+                                                for (ExternalFile file : files) {
+                                                    if (file.getPendingOp() == ExternalFilePendingOp.ADD_OP) {
+                                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
+                                                        file.setPendingOp(ExternalFilePendingOp.NO_OP);
+                                                        MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
+                                                    } else if (file.getPendingOp() == ExternalFilePendingOp.DROP_OP) {
+                                                        // find original file
+                                                        for (ExternalFile originalFile : files) {
+                                                            if (originalFile.getFileName().equals(file.getFileName())) {
+                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+                                                                        file);
+                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+                                                                        originalFile);
+                                                                break;
                                                             }
-                                                        } else if (file
-                                                                .getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
-                                                            // find original file
-                                                            for (ExternalFile originalFile : files) {
-                                                                if (originalFile.getFileName()
-                                                                        .equals(file.getFileName())) {
-                                                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                            file);
-                                                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                            originalFile);
-                                                                    originalFile.setSize(file.getSize());
-                                                                    MetadataManager.INSTANCE.addExternalFile(mdTxnCtx,
-                                                                            originalFile);
-                                                                }
+                                                        }
+                                                    } else if (file.getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
+                                                        // find original file
+                                                        for (ExternalFile originalFile : files) {
+                                                            if (originalFile.getFileName().equals(file.getFileName())) {
+                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+                                                                        file);
+                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+                                                                        originalFile);
+                                                                originalFile.setSize(file.getSize());
+                                                                MetadataManager.INSTANCE.addExternalFile(mdTxnCtx,
+                                                                        originalFile);
                                                             }
                                                         }
                                                     }
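
Two behavioral points in the GlobalRecoveryManager change: notifyNodeJoin no longer triggers global recovery, and startGlobalRecovery now receives the ICcApplicationContext it needs to build a MetadataProvider per dataverse. The dataset-state read is also hoisted above the indexes.isEmpty() check. The recovery guard itself is unchanged; in outline:

    // Outline of startGlobalRecovery(ICcApplicationContext appCtx) above:
    final ClusterState newState = ClusterStateManager.INSTANCE.getState();
    boolean needToRecover = !newState.equals(state) && (newState == ClusterState.ACTIVE);
    if (needToRecover) {
        for (Dataverse dataverse : dataverses) { // metadata dataverse skipped
            MetadataProvider metadataProvider =
                    new MetadataProvider(appCtx, dataverse, componentProvider);
            // roll BEGIN datasets back, roll READY_TO_COMMIT datasets forward
        }
    }
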
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
index aaf3d7a..9c24acf 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
@@ -28,11 +28,10 @@
 import org.apache.asterix.app.nc.NCAppRuntimeContext;
 import org.apache.asterix.app.replication.message.StartupTaskRequestMessage;
 import org.apache.asterix.common.api.AsterixThreadFactory;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.config.AsterixExtension;
 import org.apache.asterix.common.config.ClusterProperties;
 import org.apache.asterix.common.config.ExternalProperties;
-import org.apache.asterix.common.config.IPropertiesProvider;
 import org.apache.asterix.common.config.MessagingProperties;
 import org.apache.asterix.common.config.MetadataProperties;
 import org.apache.asterix.common.config.NodeProperties;
@@ -61,7 +60,7 @@
     private static final Logger LOGGER = Logger.getLogger(NCApplication.class.getName());
 
     private INCServiceContext ncServiceCtx;
-    private IAppRuntimeContext runtimeContext;
+    private INcApplicationContext runtimeContext;
     private String nodeId;
     private boolean stopInitiated = false;
     private SystemState systemState;
@@ -176,8 +175,7 @@
 
     @Override
     public NodeCapacity getCapacity() {
-        IPropertiesProvider propertiesProvider = runtimeContext;
-        StorageProperties storageProperties = propertiesProvider.getStorageProperties();
+        StorageProperties storageProperties = runtimeContext.getStorageProperties();
         // Deducts the reserved buffer cache size and memory component size from the maximum heap size,
         // and deducts one core for processing heartbeats.
         long memorySize = Runtime.getRuntime().maxMemory() - storageProperties.getBufferCacheSize()
@@ -261,7 +259,7 @@
     }
 
     @Override
-    public IAppRuntimeContext getApplicationContext() {
+    public INcApplicationContext getApplicationContext() {
         return runtimeContext;
     }
 }
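
The NC side mirrors the CC rename: IAppRuntimeContext becomes INcApplicationContext, the counterpart of ICcApplicationContext, and the IPropertiesProvider indirection disappears because the context exposes the property groups directly:

    // before:
    //   IPropertiesProvider propertiesProvider = runtimeContext;
    //   StorageProperties storageProperties = propertiesProvider.getStorageProperties();
    // after:
    StorageProperties storageProperties = runtimeContext.getStorageProperties();
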
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/CCMessageBroker.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/CCMessageBroker.java
index da93fb8..23de847 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/CCMessageBroker.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/CCMessageBroker.java
@@ -21,8 +21,10 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.messaging.api.ICCMessageBroker;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.hyracks.api.messages.IMessage;
 import org.apache.hyracks.api.util.JavaSerializationUtils;
 import org.apache.hyracks.control.cc.ClusterControllerService;
@@ -40,15 +42,16 @@
 
     @Override
     public void receivedMessage(IMessage message, String nodeId) throws Exception {
-        IApplicationMessage absMessage = (IApplicationMessage) message;
+        ICcAddressedMessage msg = (ICcAddressedMessage) message;
         if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Received message: " + absMessage);
+            LOGGER.info("Received message: " + msg);
         }
-        absMessage.handle(ccs);
+        ICcApplicationContext appCtx = (ICcApplicationContext) ccs.getApplicationContext();
+        msg.handle(appCtx);
     }
 
     @Override
-    public void sendApplicationMessageToNC(IApplicationMessage msg, String nodeId) throws Exception {
+    public void sendApplicationMessageToNC(INcAddressedMessage msg, String nodeId) throws Exception {
         INodeManager nodeManager = ccs.getNodeManager();
         NodeControllerState state = nodeManager.getNodeControllerState(nodeId);
         state.getNodeController().sendApplicationMessageToNC(JavaSerializationUtils.serialize(msg), null, nodeId);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/MessagingChannelInterfaceFactory.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/MessagingChannelInterfaceFactory.java
index 33f16da..a2a3460 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/MessagingChannelInterfaceFactory.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/MessagingChannelInterfaceFactory.java
@@ -26,7 +26,7 @@
 import org.apache.asterix.common.config.MessagingProperties;
 import org.apache.asterix.common.memory.ConcurrentFramePool;
 import org.apache.asterix.common.memory.FrameAction;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.hyracks.api.comm.IBufferAcceptor;
 import org.apache.hyracks.api.comm.IBufferFactory;
 import org.apache.hyracks.api.comm.IChannelControlBlock;
@@ -108,8 +108,8 @@
         @Override
         public void accept(ByteBuffer buffer) {
             try {
-                IApplicationMessage receivedMsg = (IApplicationMessage) JavaSerializationUtils
-                        .deserialize(buffer.array());
+                INcAddressedMessage receivedMsg =
+                        (INcAddressedMessage) JavaSerializationUtils.deserialize(buffer.array());
                 // Queue the received message and free the network IO thread
                 messageBroker.queueReceivedMessage(receivedMsg);
             } catch (ClassNotFoundException | IOException e) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
index f615138..630aabe 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
@@ -24,11 +24,12 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.config.MessagingProperties;
 import org.apache.asterix.common.memory.ConcurrentFramePool;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
 import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.hyracks.api.comm.IChannelControlBlock;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.messages.IMessage;
@@ -39,14 +40,14 @@
     private static final Logger LOGGER = Logger.getLogger(NCMessageBroker.class.getName());
 
     private final NodeControllerService ncs;
-    private final IAppRuntimeContext appContext;
-    private final LinkedBlockingQueue<IApplicationMessage> receivedMsgsQ;
+    private final INcApplicationContext appContext;
+    private final LinkedBlockingQueue<INcAddressedMessage> receivedMsgsQ;
     private final ConcurrentFramePool messagingFramePool;
     private final int maxMsgSize;
 
     public NCMessageBroker(NodeControllerService ncs, MessagingProperties messagingProperties) {
         this.ncs = ncs;
-        appContext = (IAppRuntimeContext) ncs.getApplicationContext();
+        appContext = (INcApplicationContext) ncs.getApplicationContext();
         maxMsgSize = messagingProperties.getFrameSize();
         int messagingMemoryBudget = messagingProperties.getFrameSize() * messagingProperties.getFrameCount();
         messagingFramePool = new ConcurrentFramePool(ncs.getId(), messagingMemoryBudget,
@@ -57,36 +58,36 @@
     }
 
     @Override
-    public void sendMessageToCC(IApplicationMessage message) throws Exception {
+    public void sendMessageToCC(ICcAddressedMessage message) throws Exception {
         ncs.sendApplicationMessageToCC(JavaSerializationUtils.serialize(message), null);
     }
 
     @Override
-    public void sendMessageToNC(String nodeId, IApplicationMessage message)
+    public void sendMessageToNC(String nodeId, INcAddressedMessage message)
             throws Exception {
         IChannelControlBlock messagingChannel = ncs.getMessagingNetworkManager().getMessagingChannel(nodeId);
         sendMessageToChannel(messagingChannel, message);
     }
 
     @Override
-    public void queueReceivedMessage(IApplicationMessage msg) {
+    public void queueReceivedMessage(INcAddressedMessage msg) {
         receivedMsgsQ.offer(msg);
     }
 
     @Override
     public void receivedMessage(IMessage message, String nodeId) throws Exception {
-        IApplicationMessage absMessage = (IApplicationMessage) message;
+        INcAddressedMessage absMessage = (INcAddressedMessage) message;
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Received message: " + absMessage);
         }
-        absMessage.handle(ncs);
+        absMessage.handle(appContext);
     }
 
     public ConcurrentFramePool getMessagingFramePool() {
         return messagingFramePool;
     }
 
-    private void sendMessageToChannel(IChannelControlBlock ccb, IApplicationMessage msg) throws IOException {
+    private void sendMessageToChannel(IChannelControlBlock ccb, INcAddressedMessage msg) throws IOException {
         byte[] serializedMsg = JavaSerializationUtils.serialize(msg);
         if (serializedMsg.length > maxMsgSize) {
             throw new HyracksDataException("Message exceeded maximum size");
@@ -115,7 +116,7 @@
         @Override
         public void run() {
             while (true) {
-                IApplicationMessage msg = null;
+                INcAddressedMessage msg = null;
                 try {
                     msg = receivedMsgsQ.take();
                     //TODO add nodeId to INcAddressedMessage and pass it
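
The point of this hunk: IApplicationMessage is split by destination. Messages bound for a node controller implement INcAddressedMessage and are handled with the NC's application context in hand, rather than a NodeControllerService to cast from. A minimal sketch of such a message, assuming INcAddressedMessage mirrors the ICcAddressedMessage interface shown later in this diff (class name hypothetical):

import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.hyracks.api.exceptions.HyracksDataException;

public class PingMessage implements INcAddressedMessage {
    // messages travel through JavaSerializationUtils, so they are Serializable
    private static final long serialVersionUID = 1L;

    @Override
    public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
        // the NC context arrives directly; e.g., touch NC-local state
        appCtx.getTransactionSubsystem().getLogManager();
    }
}
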
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/DataverseUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/DataverseUtil.java
index 48fd782..039933a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/DataverseUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/DataverseUtil.java
@@ -34,7 +34,7 @@
     }
 
     public static JobSpecification dropDataverseJobSpec(Dataverse dataverse, MetadataProvider metadata) {
-        JobSpecification jobSpec = RuntimeUtils.createJobSpecification();
+        JobSpecification jobSpec = RuntimeUtils.createJobSpecification(metadata.getApplicationContext());
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                 metadata.splitAndConstraints(dataverse.getDataverseName());
         FileRemoveOperatorDescriptor frod =
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
index 8b3b020..bffaeef 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
@@ -32,8 +32,8 @@
 import org.apache.asterix.active.EntityId;
 import org.apache.asterix.active.message.ActiveManagerMessage;
 import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
-import org.apache.asterix.common.config.CompilerProperties;
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.dataflow.LSMTreeInsertDeleteOperatorDescriptor;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
@@ -50,7 +50,6 @@
 import org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor;
 import org.apache.asterix.external.operators.FeedIntakeOperatorNodePushable;
 import org.apache.asterix.external.operators.FeedMetaOperatorDescriptor;
-import org.apache.asterix.external.util.FeedConstants;
 import org.apache.asterix.external.util.FeedUtils;
 import org.apache.asterix.external.util.FeedUtils.FeedRuntimeType;
 import org.apache.asterix.lang.aql.statement.SubscribeFeedStatement;
@@ -66,7 +65,6 @@
 import org.apache.asterix.metadata.feeds.LocationConstraint;
 import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
 import org.apache.asterix.runtime.job.listener.MultiTransactionJobletEventListenerFactory;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.asterix.runtime.utils.RuntimeUtils;
 import org.apache.asterix.translator.CompiledStatements;
@@ -82,7 +80,6 @@
 import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
 import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
 import org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
-import org.apache.hyracks.algebricks.runtime.operators.std.StreamSelectRuntimeFactory;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.api.constraints.Constraint;
 import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
@@ -111,15 +108,13 @@
  */
 public class FeedOperations {
 
-    private static final CompilerProperties compilerProperties = AppContextInfo.INSTANCE.getCompilerProperties();
-
     private FeedOperations() {
     }
 
     private static Pair<JobSpecification, IAdapterFactory> buildFeedIntakeJobSpec(Feed feed,
             MetadataProvider metadataProvider, FeedPolicyAccessor policyAccessor) throws Exception {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
-        spec.setFrameSize(compilerProperties.getFrameSize());
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
+        spec.setFrameSize(metadataProvider.getApplicationContext().getCompilerProperties().getFrameSize());
         IAdapterFactory adapterFactory;
         IOperatorDescriptor feedIngestor;
         AlgebricksPartitionConstraint ingesterPc;
@@ -136,8 +131,9 @@
         return Pair.of(spec, adapterFactory);
     }
 
-    public static JobSpecification buildRemoveFeedStorageJob(Feed feed) throws AsterixException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+    public static JobSpecification buildRemoveFeedStorageJob(MetadataProvider metadataProvider, Feed feed)
+            throws AsterixException {
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         AlgebricksAbsolutePartitionConstraint allCluster = ClusterStateManager.INSTANCE.getClusterLocations();
         Set<String> nodes = new TreeSet<>();
         for (String node : allCluster.getLocations()) {
@@ -168,8 +164,8 @@
         List<Statement> statements = new ArrayList<>();
         statements.add(dataverseDecl);
         statements.add(subscribeStmt);
-        IStatementExecutor translator =
-                qtFactory.create(statements, sessionConfig, compilationProvider, storageComponentProvider);
+        IStatementExecutor translator = qtFactory.create(metadataProvider.getApplicationContext(), statements,
+                sessionConfig, compilationProvider, storageComponentProvider);
         // configure the metadata provider
         metadataProvider.getConfig().put(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, "" + Boolean.TRUE);
         metadataProvider.getConfig().put(FeedActivityDetails.FEED_POLICY_NAME, "" + subscribeStmt.getPolicy());
@@ -218,8 +214,8 @@
             JobSpecification subJob = jobsList.get(iter1);
             operatorIdMapping.clear();
             Map<OperatorDescriptorId, IOperatorDescriptor> operatorsMap = subJob.getOperatorMap();
-            FeedConnectionId feedConnectionId = new FeedConnectionId(ingestionOp.getEntityId(),
-                    feedConnections.get(iter1).getDatasetName());
+            FeedConnectionId feedConnectionId =
+                    new FeedConnectionId(ingestionOp.getEntityId(), feedConnections.get(iter1).getDatasetName());
 
             FeedPolicyEntity feedPolicyEntity =
                     FeedMetadataUtil.validateIfPolicyExists(curFeedConnection.getDataverseName(),
@@ -232,9 +228,8 @@
                 if (opDesc instanceof LSMTreeInsertDeleteOperatorDescriptor
                         && ((LSMTreeInsertDeleteOperatorDescriptor) opDesc).isPrimary()) {
                     String operandId = ((LSMTreeInsertDeleteOperatorDescriptor) opDesc).getIndexName();
-                    metaOp = new FeedMetaOperatorDescriptor(jobSpec,
-                            feedConnectionId, opDesc, feedPolicyEntity.getProperties(), FeedRuntimeType.STORE,
-                            operandId);
+                    metaOp = new FeedMetaOperatorDescriptor(jobSpec, feedConnectionId, opDesc,
+                            feedPolicyEntity.getProperties(), FeedRuntimeType.STORE, operandId);
                     opId = metaOp.getOperatorId();
                     opDesc.setOperatorId(opId);
                 } else {
@@ -243,13 +238,12 @@
                         IPushRuntimeFactory[] runtimeFactories = algOp.getPipeline().getRuntimeFactories();
                         // Tweak AssignOp to work with messages
                         if (runtimeFactories[0] instanceof AssignRuntimeFactory && runtimeFactories.length > 1) {
-                            IConnectorDescriptor connectorDesc = subJob.getOperatorInputMap()
-                                    .get(opDesc.getOperatorId()).get(0);
+                            IConnectorDescriptor connectorDesc =
+                                    subJob.getOperatorInputMap().get(opDesc.getOperatorId()).get(0);
                             // anything on the network interface needs to be message compatible
                             if (connectorDesc instanceof MToNPartitioningConnectorDescriptor) {
-                                metaOp = new FeedMetaOperatorDescriptor(jobSpec,
-                                        feedConnectionId, opDesc, feedPolicyEntity.getProperties(),
-                                        FeedRuntimeType.COMPUTE, null);
+                                metaOp = new FeedMetaOperatorDescriptor(jobSpec, feedConnectionId, opDesc,
+                                        feedPolicyEntity.getProperties(), FeedRuntimeType.COMPUTE, null);
                                 opId = metaOp.getOperatorId();
                                 opDesc.setOperatorId(opId);
                             }
@@ -279,9 +273,8 @@
             }
 
             // make connections between operators
-            for (Entry<ConnectorDescriptorId,
-                    Pair<Pair<IOperatorDescriptor, Integer>,Pair<IOperatorDescriptor, Integer>>> entry :
-                          subJob.getConnectorOperatorMap().entrySet()) {
+            for (Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>,
+                    Pair<IOperatorDescriptor, Integer>>> entry : subJob.getConnectorOperatorMap().entrySet()) {
                 ConnectorDescriptorId newId = connectorIdMapping.get(entry.getKey());
                 IConnectorDescriptor connDesc = jobSpec.getConnectorMap().get(newId);
                 Pair<IOperatorDescriptor, Integer> leftOp = entry.getValue().getLeft();
@@ -387,16 +380,16 @@
                 ingestionLocations), intakeInfo.getRight().getPartitionConstraint());
     }
 
-    public static void SendStopMessageToNode(EntityId feedId, String intakeNodeLocation, Integer partition)
-            throws Exception {
+    public static void SendStopMessageToNode(ICcApplicationContext appCtx, EntityId feedId, String intakeNodeLocation,
+            Integer partition) throws Exception {
         ActiveManagerMessage stopFeedMessage = new ActiveManagerMessage(ActiveManagerMessage.STOP_ACTIVITY, "SRC",
                 new ActiveRuntimeId(feedId, FeedIntakeOperatorNodePushable.class.getSimpleName(), partition));
-        SendActiveMessage(stopFeedMessage, intakeNodeLocation);
+        SendActiveMessage(appCtx, stopFeedMessage, intakeNodeLocation);
     }
 
-    private static void SendActiveMessage(ActiveManagerMessage activeManagerMessage, String nodeId) throws Exception {
-        ICCMessageBroker messageBroker =
-                (ICCMessageBroker) AppContextInfo.INSTANCE.getCCServiceContext().getMessageBroker();
+    private static void SendActiveMessage(ICcApplicationContext appCtx, ActiveManagerMessage activeManagerMessage,
+            String nodeId) throws Exception {
+        ICCMessageBroker messageBroker = (ICCMessageBroker) appCtx.getServiceContext().getMessageBroker();
         messageBroker.sendApplicationMessageToNC(activeManagerMessage, nodeId);
     }
 }
\ No newline at end of file
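
FeedOperations no longer captures CompilerProperties from the AppContextInfo.INSTANCE singleton; every job specification is built from the context carried by the MetadataProvider. A hedged sketch of the resulting convention, assuming getApplicationContext() returns the CC-side context (helper class hypothetical):

import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.runtime.utils.RuntimeUtils;
import org.apache.hyracks.api.job.JobSpecification;

public final class JobSpecs {
    private JobSpecs() {
    }

    // assumes getApplicationContext() yields the CC context, as used in the hunks above
    public static JobSpecification newFrameSizedSpec(MetadataProvider metadataProvider) {
        ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
        JobSpecification spec = RuntimeUtils.createJobSpecification(appCtx);
        spec.setFrameSize(appCtx.getCompilerProperties().getFrameSize());
        return spec;
    }
}
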
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
index d766827..5445986 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
@@ -25,7 +25,6 @@
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
 import org.apache.asterix.runtime.operators.std.FlushDatasetOperatorDescriptor;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.transaction.management.service.transaction.JobIdFactory;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
@@ -46,7 +45,7 @@
 
     public static void flushDataset(IHyracksClientConnection hcc, MetadataProvider metadataProvider,
             String dataverseName, String datasetName, String indexName) throws Exception {
-        CompilerProperties compilerProperties = AppContextInfo.INSTANCE.getCompilerProperties();
+        CompilerProperties compilerProperties = metadataProvider.getApplicationContext().getCompilerProperties();
         int frameSize = compilerProperties.getFrameSize();
         JobSpecification spec = new JobSpecification(frameSize);
 
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
index fe08b8c..8b1bbe0 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
@@ -29,6 +29,7 @@
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.asterix.api.http.server.ConnectorApiServlet;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.file.StorageComponentProvider;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -38,6 +39,7 @@
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.utils.JSONDeserializerForTypes;
+import org.apache.asterix.test.runtime.ExecutionTestUtil;
 import org.apache.asterix.test.runtime.SqlppExecutionTest;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.api.client.NodeControllerInfo;
@@ -63,9 +65,9 @@
     public void testGet() throws Exception {
         // Starts test asterixdb cluster.
         SqlppExecutionTest.setUp();
-
         // Configures a test connector api servlet.
-        ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" });
+        ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" },
+                (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext());
         Map<String, NodeControllerInfo> nodeMap = new HashMap<>();
         ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
         PrintWriter outputWriter = new PrintWriter(outputStream);
@@ -118,7 +120,8 @@
 
     @Test
     public void testFormResponseObject() throws Exception {
-        ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" });
+        ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" },
+                (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext());
         ObjectMapper om = new ObjectMapper();
         ObjectNode actualResponse = om.createObjectNode();
         FileSplit[] splits = new FileSplit[2];
@@ -168,7 +171,9 @@
     private ARecordType getMetadataRecordType(String dataverseName, String datasetName) throws Exception {
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         // Retrieves file splits of the dataset.
-        MetadataProvider metadataProvider = new MetadataProvider(null, new StorageComponentProvider());
+        MetadataProvider metadataProvider = new MetadataProvider(
+                (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), null,
+                new StorageComponentProvider());
         try {
             metadataProvider.setMetadataTxnContext(mdTxnCtx);
             Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java
index 52ac855..340b1ce 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java
@@ -32,7 +32,7 @@
 
 import org.apache.asterix.api.http.server.VersionApiServlet;
 import org.apache.asterix.common.config.BuildProperties;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
 import org.apache.asterix.test.runtime.SqlppExecutionTest;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.http.api.IServletRequest;
@@ -60,7 +60,7 @@
         PrintWriter outputWriter = new PrintWriter(outputStream);
 
         // Creates mocks.
-        AppContextInfo mockCtx = mock(AppContextInfo.class);
+        CcApplicationContext mockCtx = mock(CcApplicationContext.class);
         IServletRequest mockRequest = mock(IServletRequest.class);
         IHyracksClientConnection mockHcc = mock(IHyracksClientConnection.class);
         IServletResponse mockResponse = mock(IServletResponse.class);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
index 4b2af80..3c2bee1 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
@@ -51,6 +51,7 @@
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.runtime.formats.NonTaggedDataFormat;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
 import org.apache.asterix.runtime.utils.RuntimeComponentsProvider;
 import org.apache.asterix.test.runtime.ExecutionTestUtil;
 import org.apache.asterix.transaction.management.opcallbacks.AbstractIndexModificationOperationCallback.Operation;
@@ -299,7 +300,9 @@
         Dataverse dataverse = new Dataverse(dataset.getDataverseName(), NonTaggedDataFormat.class.getName(),
                 MetadataUtil.PENDING_NO_OP);
         Index index = primaryIndexInfo.getIndex();
-        MetadataProvider mdProvider = new MetadataProvider(dataverse, storageComponentProvider);
+        CcApplicationContext appCtx =
+                (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext();
+        MetadataProvider mdProvider = new MetadataProvider(appCtx, dataverse, storageComponentProvider);
         try {
             return dataset.getIndexDataflowHelperFactory(mdProvider, index, primaryIndexInfo.recordType,
                     primaryIndexInfo.metaType, primaryIndexInfo.mergePolicyFactory,
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/external/TestLibrarian.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/external/TestLibrarian.java
index e92b5e8..c67d26c 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/external/TestLibrarian.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/external/TestLibrarian.java
@@ -35,8 +35,6 @@
 
 public class TestLibrarian implements ITestLibrarian {
 
-    public static final String LIBRARY_DIR_NAME = "library";
-
     // The following list includes a library manager for the CC
     // and library managers for NCs (one-per-NC).
     private final List<ILibraryManager> libraryManagers;
@@ -84,9 +82,6 @@
 
     public static void removeLibraryDir() throws IOException {
         File installLibDir = ExternalLibraryUtils.getLibraryInstallDir();
-        if (!installLibDir.getAbsolutePath().endsWith(LIBRARY_DIR_NAME)) {
-            throw new HyracksDataException("Invalid library directory");
-        }
         FileUtils.deleteQuietly(installLibDir);
     }
 
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/aql/translator/QueryTranslatorTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/aql/translator/QueryTranslatorTest.java
index 5cd8a63..e2885b3 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/aql/translator/QueryTranslatorTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/aql/translator/QueryTranslatorTest.java
@@ -37,7 +37,7 @@
 import org.apache.asterix.file.StorageComponentProvider;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.lang.common.statement.RunStatement;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
 import org.apache.asterix.translator.IStatementExecutor;
 import org.apache.asterix.translator.SessionConfig;
 import org.junit.Assert;
@@ -55,8 +55,7 @@
         RunStatement mockRunStatement = mock(RunStatement.class);
 
         // Mocks the CC application context.
-        AppContextInfo mockAsterixAppContextInfo = mock(AppContextInfo.class);
-        setFinalStaticField(AppContextInfo.class.getDeclaredField("INSTANCE"), mockAsterixAppContextInfo);
+        CcApplicationContext mockAsterixAppContextInfo = mock(CcApplicationContext.class);
         ExternalProperties mockAsterixExternalProperties = mock(ExternalProperties.class);
         when(mockAsterixAppContextInfo.getExternalProperties()).thenReturn(mockAsterixExternalProperties);
         when(mockAsterixExternalProperties.getAPIServerPort()).thenReturn(19002);
@@ -70,8 +69,8 @@
         when(mockCluster.getMasterNode()).thenReturn(mockMasterNode);
         when(mockMasterNode.getClientIp()).thenReturn("127.0.0.1");
 
-        IStatementExecutor aqlTranslator = new DefaultStatementExecutorFactory().create(statements, mockSessionConfig,
-                new AqlCompilationProvider(), new StorageComponentProvider());
+        IStatementExecutor aqlTranslator = new DefaultStatementExecutorFactory().create(mockAsterixAppContextInfo,
+                statements, mockSessionConfig, new AqlCompilationProvider(), new StorageComponentProvider());
         List<String> parameters = new ArrayList<>();
         parameters.add("examples/pregelix-example-jar-with-dependencies.jar");
         parameters.add("org.apache.pregelix.example.PageRankVertex");
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
index d585c39..8457681 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
@@ -451,8 +451,8 @@
         return response.getEntity().getContent();
     }
 
-    public InputStream executeQueryService(String str, URI uri) throws Exception {
-        return executeQueryService(str, OutputFormat.CLEAN_JSON, uri, new ArrayList<>(), false);
+    public InputStream executeQueryService(String str, URI uri, OutputFormat fmt) throws Exception {
+        return executeQueryService(str, fmt, uri, new ArrayList<>(), false);
     }
 
     public InputStream executeQueryService(String str, OutputFormat fmt, URI uri,
@@ -755,8 +755,8 @@
                 if (ctx.getFile().getName().endsWith("aql")) {
                     executeDDL(statement, getEndpoint(Servlets.AQL_DDL));
                 } else {
-                    InputStream resultStream =
-                            executeQueryService(statement, getEndpoint(Servlets.QUERY_SERVICE));
+                    InputStream resultStream = executeQueryService(statement, getEndpoint(Servlets.QUERY_SERVICE),
+                            OutputFormat.CLEAN_JSON);
                     ResultExtractor.extract(resultStream);
                 }
                 break;
@@ -768,8 +768,8 @@
                 if (ctx.getFile().getName().endsWith("aql")) {
                     executeUpdate(statement, getEndpoint(Servlets.AQL_UPDATE));
                 } else {
-                    InputStream resultStream =
-                            executeQueryService(statement, getEndpoint(Servlets.QUERY_SERVICE));
+                    InputStream resultStream = executeQueryService(statement, getEndpoint(Servlets.QUERY_SERVICE),
+                            OutputFormat.forCompilationUnit(cUnit));
                     ResultExtractor.extract(resultStream);
                 }
                 break;
@@ -1254,7 +1254,7 @@
         try {
             ArrayList<String> toBeDropped = new ArrayList<>();
             InputStream resultStream = executeQueryService("select dv.DataverseName from Metadata.`Dataverse` as dv;",
-                    getEndpoint(Servlets.QUERY_SERVICE));
+                    getEndpoint(Servlets.QUERY_SERVICE), OutputFormat.CLEAN_JSON);
             String out = IOUtils.toString(resultStream);
             ObjectMapper om = new ObjectMapper();
             om.setConfig(
@@ -1284,8 +1284,8 @@
                     dropStatement.append(dv);
                     dropStatement.append(";\n");
                 }
-                resultStream =
-                        executeQueryService(dropStatement.toString(), getEndpoint(Servlets.QUERY_SERVICE));
+                resultStream = executeQueryService(dropStatement.toString(), getEndpoint(Servlets.QUERY_SERVICE),
+                        OutputFormat.CLEAN_JSON);
                 ResultExtractor.extract(resultStream);
             }
         } catch (Throwable th) {
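
The executeQueryService overload now requires the caller to name the output format instead of silently defaulting to CLEAN_JSON, which lets update statements honor OutputFormat.forCompilationUnit(cUnit). A usage sketch; the wrapper class is hypothetical and the OutputFormat import path is assumed:

import java.io.InputStream;
import java.net.URI;

import org.apache.asterix.test.common.TestExecutor;
import org.apache.asterix.translator.SessionConfig.OutputFormat;

public final class QueryServiceCalls {
    private QueryServiceCalls() {
    }

    // queryServiceUri would come from TestExecutor#getEndpoint(Servlets.QUERY_SERVICE)
    public static InputStream listDataverses(TestExecutor executor, URI queryServiceUri) throws Exception {
        return executor.executeQueryService(
                "select dv.DataverseName from Metadata.`Dataverse` as dv;", queryServiceUri,
                OutputFormat.CLEAN_JSON);
    }
}
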
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java
index 4702b1d..7818d13 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java
@@ -28,6 +28,7 @@
 import org.apache.asterix.api.common.AsterixHyracksIntegrationUtil;
 import org.apache.asterix.api.java.AsterixJavaClient;
 import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.compiler.provider.AqlCompilationProvider;
 import org.apache.asterix.file.StorageComponentProvider;
@@ -61,7 +62,9 @@
         integrationUtil.init(true);
         Reader loadReader = new BufferedReader(
                 new InputStreamReader(new FileInputStream(LOAD_FOR_ENLIST_FILE), "UTF-8"));
-        AsterixJavaClient asterixLoad = new AsterixJavaClient(integrationUtil.getHyracksClientConnection(), loadReader,
+        AsterixJavaClient asterixLoad =
+                new AsterixJavaClient((ICcApplicationContext) integrationUtil.cc.getApplicationContext(),
+                        integrationUtil.getHyracksClientConnection(), loadReader,
                 ERR, new AqlCompilationProvider(), new DefaultStatementExecutorFactory(), new StorageComponentProvider());
         try {
             asterixLoad.compile(true, false, false, false, false, true, false);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
index 15581c3..409bbdc 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
@@ -33,6 +33,7 @@
 import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
 import org.apache.asterix.common.config.GlobalConfig;
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.compiler.provider.AqlCompilationProvider;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
@@ -65,8 +66,8 @@
     private static final String EXTENSION_RESULT = "plan";
     private static final String FILENAME_IGNORE = "ignore.txt";
     private static final String FILENAME_ONLY = "only.txt";
-    private static final String PATH_BASE = "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR
-            + "optimizerts" + SEPARATOR;
+    private static final String PATH_BASE =
+            "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "optimizerts" + SEPARATOR;
     private static final String PATH_QUERIES = PATH_BASE + "queries" + SEPARATOR;
     private static final String PATH_EXPECTED = PATH_BASE + "results" + SEPARATOR;
     protected static final String PATH_ACTUAL = "target" + File.separator + "opttest" + SEPARATOR;
@@ -152,8 +153,8 @@
     @Test
     public void test() throws Exception {
         try {
-            String queryFileShort = queryFile.getPath().substring(PATH_QUERIES.length()).replace(SEPARATOR.charAt(0),
-                    '/');
+            String queryFileShort =
+                    queryFile.getPath().substring(PATH_QUERIES.length()).replace(SEPARATOR.charAt(0), '/');
             if (!only.isEmpty()) {
                 boolean toRun = TestHelper.isInPrefixList(only, queryFileShort);
                 if (!toRun) {
@@ -175,14 +176,15 @@
             actualFile.getParentFile().mkdirs();
 
             PrintWriter plan = new PrintWriter(actualFile);
-            ILangCompilationProvider provider = queryFile.getName().endsWith("aql") ? aqlCompilationProvider
-                    : sqlppCompilationProvider;
+            ILangCompilationProvider provider =
+                    queryFile.getName().endsWith("aql") ? aqlCompilationProvider : sqlppCompilationProvider;
             if (extensionLangCompilationProvider != null) {
                 provider = extensionLangCompilationProvider;
             }
             IHyracksClientConnection hcc = integrationUtil.getHyracksClientConnection();
-            AsterixJavaClient asterix = new AsterixJavaClient(hcc, query, plan, provider, statementExecutorFactory,
-                    storageComponentProvider);
+            AsterixJavaClient asterix =
+                    new AsterixJavaClient((ICcApplicationContext) integrationUtil.cc.getApplicationContext(), hcc,
+                            query, plan, provider, statementExecutorFactory, storageComponentProvider);
             try {
                 asterix.compile(true, false, false, true, true, false, false);
             } catch (AsterixException e) {
@@ -193,10 +195,10 @@
             plan.close();
             query.close();
 
-            BufferedReader readerExpected = new BufferedReader(
-                    new InputStreamReader(new FileInputStream(expectedFile), "UTF-8"));
-            BufferedReader readerActual = new BufferedReader(
-                    new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
+            BufferedReader readerExpected =
+                    new BufferedReader(new InputStreamReader(new FileInputStream(expectedFile), "UTF-8"));
+            BufferedReader readerActual =
+                    new BufferedReader(new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
 
             String lineExpected, lineActual;
             int num = 1;
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
index 8714319..5c2d263 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
@@ -26,12 +26,12 @@
 import java.util.logging.Logger;
 
 import org.apache.asterix.api.common.AsterixHyracksIntegrationUtil;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.config.GlobalConfig;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.library.ILibraryManager;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.IdentitiyResolverFactory;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.testframework.xml.TestGroup;
 import org.apache.asterix.testframework.xml.TestSuite;
 import org.apache.hyracks.control.nc.NodeControllerService;
@@ -89,10 +89,10 @@
 
         List<ILibraryManager> libraryManagers = new ArrayList<>();
         // Adds the library manager for CC.
-        libraryManagers.add(AppContextInfo.INSTANCE.getLibraryManager());
+        libraryManagers.add(((ICcApplicationContext) integrationUtil.cc.getApplicationContext()).getLibraryManager());
         // Adds library managers for NCs, one-per-NC.
         for (NodeControllerService nc : integrationUtil.ncs) {
-            IAppRuntimeContext runtimeCtx = (IAppRuntimeContext) nc.getApplicationContext();
+            INcApplicationContext runtimeCtx = (INcApplicationContext) nc.getApplicationContext();
             libraryManagers.add(runtimeCtx.getLibraryManager());
         }
         return libraryManagers;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IndexException.java b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/explain/explain_pretty/explain_pretty.1.query.sqlpp
similarity index 72%
rename from hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IndexException.java
rename to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/explain/explain_pretty/explain_pretty.1.query.sqlpp
index 0c5d2cc..6860087 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IndexException.java
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/explain/explain_pretty/explain_pretty.1.query.sqlpp
@@ -16,17 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
-package org.apache.hyracks.storage.am.common.api;
-
-public class IndexException extends Exception {
-    private static final long serialVersionUID = 1L;
-
-    public IndexException(Exception e) {
-        super(e);
-    }
-
-    public IndexException(String message) {
-        super(message);
-    }
-}
+/*
+* Description  : EXPLAIN a plan for a very simple query
+* Expected Res : Success
+* Date         : Jul 25, 2016
+*/
+explain select value 1+1;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
index b962ce8..d2379c5 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -1650,7 +1650,7 @@
     <test-case FilePath="dml">
       <compilation-unit name="insert-duplicated-keys-from-query">
         <output-dir compare="Text">insert-duplicated-keys-from-query</output-dir>
-        <expected-error>Failed to insert key since key already exists</expected-error>
+        <expected-error>Inserting duplicate keys into the primary storage</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="dml">
@@ -1833,7 +1833,7 @@
     <test-case FilePath="dml">
       <compilation-unit name="insert-duplicated-keys">
         <output-dir compare="Text">insert-duplicated-keys</output-dir>
-        <expected-error>org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException: Failed to insert key since key already exists</expected-error>
+        <expected-error>Inserting duplicate keys into the primary storage</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="dml">
@@ -6742,7 +6742,7 @@
     <test-case FilePath="load">
       <compilation-unit name="duplicate-key-error">
         <output-dir compare="Text">none</output-dir>
-        <expected-error>Input stream given to BTree bulk load has duplicates</expected-error>
+        <expected-error>Loading duplicate keys into the primary storage</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="load">
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index 3c075e4..c2b0951 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -131,6 +131,12 @@
         <output-dir compare="Text">explain_simple</output-dir>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="explain">
+      <compilation-unit name="explain_pretty">
+        <parameter name="pretty" value="true" />
+        <output-dir compare="Text">explain_simple</output-dir>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-case FilePath="flwor">
     <compilation-unit name="let33">
@@ -1719,7 +1725,7 @@
     <test-case FilePath="dml">
       <compilation-unit name="insert-duplicated-keys">
         <output-dir compare="Text">insert-duplicated-keys</output-dir>
-        <expected-error>Failed to insert key since key already exists</expected-error>
+        <expected-error>Inserting duplicate keys into the primary storage</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="dml">
@@ -7785,7 +7791,7 @@
     <test-case FilePath="load">
       <compilation-unit name="duplicate-key-error">
         <output-dir compare="Text">none</output-dir>
-        <expected-error>Input stream given to BTree bulk load has duplicates</expected-error>
+        <expected-error>Loading duplicate keys into the primary storage</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="load">
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IApplicationContext.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IApplicationContext.java
new file mode 100644
index 0000000..0aea84d
--- /dev/null
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IApplicationContext.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.common.api;
+
+import org.apache.asterix.common.config.ActiveProperties;
+import org.apache.asterix.common.config.BuildProperties;
+import org.apache.asterix.common.config.CompilerProperties;
+import org.apache.asterix.common.config.ExternalProperties;
+import org.apache.asterix.common.config.MessagingProperties;
+import org.apache.asterix.common.config.MetadataProperties;
+import org.apache.asterix.common.config.NodeProperties;
+import org.apache.asterix.common.config.ReplicationProperties;
+import org.apache.asterix.common.config.StorageProperties;
+import org.apache.asterix.common.config.TransactionProperties;
+import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.hyracks.api.application.IServiceContext;
+
+public interface IApplicationContext {
+
+    StorageProperties getStorageProperties();
+
+    TransactionProperties getTransactionProperties();
+
+    CompilerProperties getCompilerProperties();
+
+    MetadataProperties getMetadataProperties();
+
+    ExternalProperties getExternalProperties();
+
+    ActiveProperties getActiveProperties();
+
+    BuildProperties getBuildProperties();
+
+    ReplicationProperties getReplicationProperties();
+
+    MessagingProperties getMessagingProperties();
+
+    NodeProperties getNodeProperties();
+
+    /**
+     * @return the library manager which implements {@link org.apache.asterix.common.library.ILibraryManager}
+     */
+    ILibraryManager getLibraryManager();
+
+    IServiceContext getServiceContext();
+
+}
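
This new interface folds the accessors of IPropertiesProvider (deleted below) together with the library manager and service context, so code that only needs configuration can depend on one type on either controller. A small hypothetical consumer, mirroring the messaging budget computed in NCMessageBroker above:

import org.apache.asterix.common.api.IApplicationContext;

public final class FrameBudget {
    private FrameBudget() {
    }

    // works unchanged on a CC or an NC, since both contexts extend IApplicationContext
    public static int messagingBudgetBytes(IApplicationContext appCtx) {
        int frameSize = appCtx.getMessagingProperties().getFrameSize();
        int frameCount = appCtx.getMessagingProperties().getFrameCount();
        return frameSize * frameCount;
    }
}
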
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IAppRuntimeContext.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/INcApplicationContext.java
similarity index 94%
rename from asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IAppRuntimeContext.java
rename to asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/INcApplicationContext.java
index da4da6b..5e69746 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IAppRuntimeContext.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/INcApplicationContext.java
@@ -22,16 +22,15 @@
 import java.rmi.RemoteException;
 import java.util.concurrent.Executor;
 
-import org.apache.asterix.common.config.IPropertiesProvider;
 import org.apache.asterix.common.context.IStorageComponentProvider;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.library.ILibraryManager;
 import org.apache.asterix.common.replication.IRemoteRecoveryManager;
 import org.apache.asterix.common.replication.IReplicaResourcesManager;
 import org.apache.asterix.common.replication.IReplicationChannel;
 import org.apache.asterix.common.replication.IReplicationManager;
 import org.apache.asterix.common.transactions.ITransactionSubsystem;
+import org.apache.hyracks.api.application.INCServiceContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -42,7 +41,7 @@
 import org.apache.hyracks.storage.common.file.ILocalResourceRepository;
 import org.apache.hyracks.storage.common.file.IResourceIdFactory;
 
-public interface IAppRuntimeContext extends IPropertiesProvider {
+public interface INcApplicationContext extends IApplicationContext {
 
     IIOManager getIOManager();
 
@@ -88,8 +87,6 @@
 
     IReplicationChannel getReplicationChannel();
 
-    ILibraryManager getLibraryManager();
-
     /**
      * Exports the metadata node to the metadata RMI port.
      *
@@ -116,4 +113,7 @@
      * @return instance of {@link org.apache.asterix.common.context.IStorageComponentProvider}
      */
     IStorageComponentProvider getStorageComponentProvider();
+
+    @Override
+    INCServiceContext getServiceContext();
 }
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/IGlobalRecoveryManager.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/IGlobalRecoveryManager.java
index b54bb39..6211af4 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/IGlobalRecoveryManager.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/IGlobalRecoveryManager.java
@@ -19,11 +19,12 @@
 package org.apache.asterix.common.cluster;
 
 import org.apache.asterix.common.api.IClusterEventsSubscriber;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 
 public interface IGlobalRecoveryManager extends IClusterEventsSubscriber {
 
     /**
      * Starts the global recovery process if the cluster state changed to ACTIVE.
      */
-    public void startGlobalRecovery();
+    public void startGlobalRecovery(ICcApplicationContext appCtx);
 }
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertiesProvider.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertiesProvider.java
deleted file mode 100644
index c1264a9..0000000
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertiesProvider.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.config;
-
-public interface IPropertiesProvider {
-    StorageProperties getStorageProperties();
-
-    TransactionProperties getTransactionProperties();
-
-    CompilerProperties getCompilerProperties();
-
-    MetadataProperties getMetadataProperties();
-
-    ExternalProperties getExternalProperties();
-
-    ActiveProperties getActiveProperties();
-
-    BuildProperties getBuildProperties();
-
-    ReplicationProperties getReplicationProperties();
-
-    MessagingProperties getMessagingProperties();
-
-    NodeProperties getNodeProperties();
-}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixVirtualBufferCacheProvider.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixVirtualBufferCacheProvider.java
index 7f3f6aa..f122096 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixVirtualBufferCacheProvider.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixVirtualBufferCacheProvider.java
@@ -20,7 +20,7 @@
 
 import java.util.List;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
@@ -56,7 +56,7 @@
                 deviceId = i;
             }
         }
-        return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+        return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
                 .getDatasetLifecycleManager().getVirtualBufferCaches(datasetID, deviceId);
     }
 
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicy.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicy.java
index e16ce78..17da26a 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicy.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicy.java
@@ -28,11 +28,10 @@
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IResourceLifecycleManager;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.ComponentState;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
@@ -51,8 +50,7 @@
     }
 
     @Override
-    public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested)
-            throws HyracksDataException, IndexException {
+    public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException {
         // This merge policy will only look at primary indexes in order to evaluate if a merge operation is needed. If it decides that
         // a merge operation is needed, then it will merge *all* the indexes that belong to the dataset. The criteria to decide if a merge
         // is needed is the same as the one that is used in the prefix merge policy:
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicyFactory.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicyFactory.java
index d9381a2..cec9f57 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicyFactory.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicyFactory.java
@@ -24,7 +24,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.storage.am.common.api.IResourceLifecycleManager;
@@ -43,7 +43,7 @@
 
     @Override
     public ILSMMergePolicy createMergePolicy(Map<String, String> properties, IHyracksTaskContext ctx) {
-        IDatasetLifecycleManager dslcManager = ((IAppRuntimeContext) ctx.getJobletContext()
+        IDatasetLifecycleManager dslcManager = ((INcApplicationContext) ctx.getJobletContext()
                 .getServiceContext().getApplicationContext()).getDatasetLifecycleManager();
         ILSMMergePolicy policy = new CorrelatedPrefixMergePolicy(dslcManager, datasetID);
         policy.configure(properties);
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/TransactionSubsystemProvider.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/TransactionSubsystemProvider.java
index 5dab970..0a40058 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/TransactionSubsystemProvider.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/TransactionSubsystemProvider.java
@@ -19,7 +19,7 @@
 
 package org.apache.asterix.common.context;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.transactions.ITransactionSubsystem;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 
@@ -37,8 +37,8 @@
 
     @Override
     public ITransactionSubsystem getTransactionSubsystem(IHyracksTaskContext ctx) {
-        IAppRuntimeContext appCtx =
-                (IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+        INcApplicationContext appCtx =
+                (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
         return appCtx.getTransactionSubsystem();
     }
 }
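
The same downcast from a task context to INcApplicationContext now appears in the buffer-cache provider, the merge-policy factory, and this transaction-subsystem provider. A hypothetical helper that names the pattern once:

import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;

public final class NcContexts {
    private NcContexts() {
    }

    // every NC-side task can reach its application context through the joblet's service context
    public static INcApplicationContext of(IHyracksTaskContext ctx) {
        return (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
    }
}
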
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/IApplicationContextInfo.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/ICcApplicationContext.java
similarity index 76%
rename from asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/IApplicationContextInfo.java
rename to asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/ICcApplicationContext.java
index 3c5328d..a9e6448 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/IApplicationContextInfo.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/ICcApplicationContext.java
@@ -18,9 +18,12 @@
  */
 package org.apache.asterix.common.dataflow;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.cluster.IGlobalRecoveryManager;
-import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.asterix.common.transactions.IResourceIdManager;
 import org.apache.hyracks.api.application.ICCServiceContext;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.job.IJobLifecycleListener;
 import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import org.apache.hyracks.storage.common.IStorageManager;
 
@@ -33,7 +36,7 @@
  * and {@link org.apache.asterix.common.library.ILibraryManager}
  * at the cluster controller side.
  */
-public interface IApplicationContextInfo {
+public interface ICcApplicationContext extends IApplicationContext {
 
     /**
      * Returns an instance of the implementation for IIndexLifecycleManagerProvider.
@@ -50,7 +53,8 @@
     /**
      * @return an instance which implements {@link org.apache.hyracks.api.application.ICCServiceContext}
      */
-    public ICCServiceContext getCCServiceContext();
+    @Override
+    public ICCServiceContext getServiceContext();
 
     /**
      * @return the global recovery manager which implements
@@ -59,7 +63,14 @@
     public IGlobalRecoveryManager getGlobalRecoveryManager();
 
     /**
-     * @return the library manager which implements {@link org.apache.asterix.common.library.ILibraryManager}
+     * @return the active lifecycle listener at the cluster controller
      */
-    public ILibraryManager getLibraryManager();
+    public IJobLifecycleListener getActiveLifecycleListener();
+
+    /**
+     * @return a new instance of {@link IHyracksClientConnection}
+     */
+    public IHyracksClientConnection getHcc();
+
+    public IResourceIdManager getResourceIdManager();
 }
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/LSMInsertDeleteOperatorNodePushable.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/LSMInsertDeleteOperatorNodePushable.java
index 2eca55d..4b70dd7 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/LSMInsertDeleteOperatorNodePushable.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/LSMInsertDeleteOperatorNodePushable.java
@@ -20,7 +20,7 @@
 
 import java.nio.ByteBuffer;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.transactions.ILogMarkerCallback;
 import org.apache.asterix.common.transactions.PrimaryIndexLogMarkerCallback;
 import org.apache.hyracks.api.comm.VSizeFrame;
@@ -93,8 +93,8 @@
                 tupleFilter = tupleFilterFactory.createTupleFilter(ctx);
                 frameTuple = new FrameTupleReference();
             }
-            IAppRuntimeContext runtimeCtx =
-                    (IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+            INcApplicationContext runtimeCtx =
+                    (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
             LSMIndexUtil.checkAndSetFirstLSN(lsmIndex, runtimeCtx.getTransactionSubsystem().getLogManager());
         } catch (Throwable th) {
             throw new HyracksDataException(th);
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICCMessageBroker.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICCMessageBroker.java
index ced2b6d..b2fde52 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICCMessageBroker.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICCMessageBroker.java
@@ -29,5 +29,5 @@
      * @param nodeId
      * @throws Exception
      */
-    public void sendApplicationMessageToNC(IApplicationMessage msg, String nodeId) throws Exception;
+    public void sendApplicationMessageToNC(INcAddressedMessage msg, String nodeId) throws Exception;
 }
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICcAddressedMessage.java
similarity index 82%
copy from asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java
copy to asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICcAddressedMessage.java
index 6e8c4cf..37cba9c 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICcAddressedMessage.java
@@ -18,15 +18,16 @@
  */
 package org.apache.asterix.common.messaging.api;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.messages.IMessage;
-import org.apache.hyracks.api.service.IControllerService;
 
 @FunctionalInterface
-public interface IApplicationMessage extends IMessage {
+public interface ICcAddressedMessage extends IMessage {
 
     /**
      * handle the message upon delivery
      */
-    void handle(IControllerService cs) throws HyracksDataException, InterruptedException;
+    void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException;
+
 }
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INCMessageBroker.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INCMessageBroker.java
index 707f864..e1101b3 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INCMessageBroker.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INCMessageBroker.java
@@ -29,7 +29,7 @@
-     * @param callback
+     * @param message
      * @throws Exception
      */
-    public void sendMessageToCC(IApplicationMessage message) throws Exception;
+    public void sendMessageToCC(ICcAddressedMessage message) throws Exception;
 
     /**
      * Sends application message from this NC to another NC.
@@ -38,7 +38,7 @@
-     * @param callback
+     * @param message
      * @throws Exception
      */
-    public void sendMessageToNC(String nodeId, IApplicationMessage message)
+    public void sendMessageToNC(String nodeId, INcAddressedMessage message)
             throws Exception;
 
     /**
@@ -46,5 +46,5 @@
      *
      * @param msg
      */
-    public void queueReceivedMessage(IApplicationMessage msg);
+    public void queueReceivedMessage(INcAddressedMessage msg);
 }
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INcAddressedMessage.java
similarity index 82%
rename from asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java
rename to asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INcAddressedMessage.java
index 6e8c4cf..0eef06c 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INcAddressedMessage.java
@@ -18,15 +18,16 @@
  */
 package org.apache.asterix.common.messaging.api;
 
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.messages.IMessage;
-import org.apache.hyracks.api.service.IControllerService;
 
 @FunctionalInterface
-public interface IApplicationMessage extends IMessage {
+public interface INcAddressedMessage extends IMessage {
 
     /**
      * handle the message upon delivery
      */
-    void handle(IControllerService cs) throws HyracksDataException, InterruptedException;
+    void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException;
+
 }
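
Together, ICcAddressedMessage and INcAddressedMessage replace the catch-all IApplicationMessage with destination-typed contracts, so a handler receives the matching application context without downcasting a controller service. A minimal sketch of an NC-addressed message (class name and body are illustrative):

    import org.apache.asterix.common.api.INcApplicationContext;
    import org.apache.asterix.common.messaging.api.INcAddressedMessage;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    // Sketch: the delivered context is already NC-typed under the new contract.
    public class PingMessage implements INcAddressedMessage {
        private static final long serialVersionUID = 1L;

        @Override
        public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
            // Illustrative body: a real message would act on NC-side services,
            // e.g. appCtx.getTransactionSubsystem().
        }
    }
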
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/IFaultToleranceStrategy.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/IFaultToleranceStrategy.java
index 46d5d98..5c286cc 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/IFaultToleranceStrategy.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/IFaultToleranceStrategy.java
@@ -19,7 +19,7 @@
 package org.apache.asterix.common.replication;
 
 import org.apache.asterix.common.cluster.IClusterStateManager;
-import org.apache.asterix.common.messaging.api.ICCMessageBroker;
+import org.apache.hyracks.api.application.ICCServiceContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public interface IFaultToleranceStrategy {
@@ -62,6 +62,6 @@
-     * @param messageBroker
+     * @param serviceCtx
      * @return
      */
-    IFaultToleranceStrategy from(IReplicationStrategy replicationStrategy, ICCMessageBroker messageBroker);
+    IFaultToleranceStrategy from(ICCServiceContext serviceCtx, IReplicationStrategy replicationStrategy);
 
 }
\ No newline at end of file
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/INCLifecycleMessage.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/INCLifecycleMessage.java
index c19b0aa..87b0856 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/INCLifecycleMessage.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/INCLifecycleMessage.java
@@ -18,9 +18,9 @@
  */
 package org.apache.asterix.common.replication;
 
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.hyracks.api.messages.IMessage;
 
-public interface INCLifecycleMessage extends IApplicationMessage {
+public interface INCLifecycleMessage extends IMessage {
 
     public enum MessageType {
         REPLAY_LOGS_REQUEST,
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/transactions/IAppRuntimeContextProvider.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/transactions/IAppRuntimeContextProvider.java
index 4ff1f47..c27d2de 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/transactions/IAppRuntimeContextProvider.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/transactions/IAppRuntimeContextProvider.java
@@ -19,7 +19,7 @@
 package org.apache.asterix.common.transactions;
 
 import org.apache.asterix.common.api.ThreadExecutor;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -50,5 +50,5 @@
 
     public IIOManager getIOManager();
 
-    public IAppRuntimeContext getAppContext();
+    public INcApplicationContext getAppContext();
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
index 46da770..2eb81d4 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
@@ -23,7 +23,8 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.IApplicationContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.library.ILibraryManager;
 import org.apache.asterix.external.api.IAdapterFactory;
 import org.apache.asterix.external.api.IDataFlowController;
@@ -47,6 +48,8 @@
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.INCServiceContext;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileSplit;
@@ -88,10 +91,11 @@
     @Override
     public synchronized IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition)
             throws HyracksDataException {
-        IAppRuntimeContext appCtx =
-                (IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+        INCServiceContext serviceCtx = ctx.getJobletContext().getServiceContext();
+        INcApplicationContext appCtx =
+                (INcApplicationContext) serviceCtx.getApplicationContext();
         try {
-            restoreExternalObjects(appCtx.getLibraryManager());
+            restoreExternalObjects(serviceCtx, appCtx.getLibraryManager());
         } catch (Exception e) {
             LOGGER.log(Level.INFO, "Failure restoring external objects", e);
             throw HyracksDataException.create(e);
@@ -111,7 +115,7 @@
         }
     }
 
-    private void restoreExternalObjects(ILibraryManager libraryManager)
+    private void restoreExternalObjects(IServiceContext serviceContext, ILibraryManager libraryManager)
             throws HyracksDataException, AlgebricksException {
         if (dataSourceFactory == null) {
             dataSourceFactory = DatasourceFactoryProvider.getExternalDataSourceFactory(libraryManager, configuration);
@@ -119,7 +123,7 @@
             if (dataSourceFactory.isIndexible() && (files != null)) {
                 ((IIndexibleExternalDataSource) dataSourceFactory).setSnapshot(files, indexingOp);
             }
-            dataSourceFactory.configure(configuration);
+            dataSourceFactory.configure(serviceContext, configuration);
         }
         if (dataParserFactory == null) {
             // create and configure parser factory
@@ -131,17 +135,19 @@
     }
 
     @Override
-    public void configure(ILibraryManager libraryManager, Map<String, String> configuration)
+    public void configure(IServiceContext serviceContext, Map<String, String> configuration)
             throws HyracksDataException, AlgebricksException {
         this.configuration = configuration;
+        IApplicationContext appCtx = (IApplicationContext) serviceContext.getApplicationContext();
         ExternalDataUtils.validateDataSourceParameters(configuration);
-        dataSourceFactory = DatasourceFactoryProvider.getExternalDataSourceFactory(libraryManager, configuration);
+        dataSourceFactory =
+                DatasourceFactoryProvider.getExternalDataSourceFactory(appCtx.getLibraryManager(), configuration);
         if (dataSourceFactory.isIndexible() && (files != null)) {
             ((IIndexibleExternalDataSource) dataSourceFactory).setSnapshot(files, indexingOp);
         }
-        dataSourceFactory.configure(configuration);
+        dataSourceFactory.configure(serviceContext, configuration);
         ExternalDataUtils.validateDataParserParameters(configuration);
-        dataParserFactory = ParserFactoryProvider.getDataParserFactory(libraryManager, configuration);
+        dataParserFactory = ParserFactoryProvider.getDataParserFactory(appCtx.getLibraryManager(), configuration);
         dataParserFactory.setRecordType(recordType);
         dataParserFactory.setMetaType(metaType);
         dataParserFactory.configure(configuration);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java
index 6b69d9c..a31b46d 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java
@@ -21,7 +21,7 @@
 import java.io.Serializable;
 import java.util.Map;
 
-import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.external.api.ILookupReaderFactory;
 import org.apache.asterix.external.api.ILookupRecordReader;
 import org.apache.asterix.external.api.IRecordDataParser;
@@ -34,6 +34,7 @@
 import org.apache.asterix.external.provider.ParserFactoryProvider;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.dataflow.value.IMissingWriterFactory;
@@ -66,8 +67,8 @@
             ExternalFileIndexAccessor snapshotAccessor, IFrameWriter writer) throws HyracksDataException {
         try {
             IRecordDataParser<T> dataParser = dataParserFactory.createRecordParser(ctx);
-            ILookupRecordReader<? extends T> reader = readerFactory.createRecordReader(ctx, partition,
-                    snapshotAccessor);
+            ILookupRecordReader<? extends T> reader =
+                    readerFactory.createRecordReader(ctx, partition, snapshotAccessor);
             reader.configure(configuration);
             RecordIdReader ridReader = RecordIdReaderFactory.create(configuration, ridFields);
             return new LookupAdapter<>(dataParser, reader, inRecDesc, ridReader, retainInput, retainMissing,
@@ -77,14 +78,15 @@
         }
     }
 
-    public void configure(ILibraryManager libraryManager, Map<String, String> configuration)
+    public void configure(IServiceContext serviceContext, Map<String, String> configuration)
             throws HyracksDataException, AlgebricksException {
         this.configuration = configuration;
-        readerFactory = LookupReaderFactoryProvider.getLookupReaderFactory(configuration);
-        dataParserFactory = (IRecordDataParserFactory<T>) ParserFactoryProvider.getDataParserFactory(libraryManager,
-                configuration);
+        IApplicationContext appCtx = (IApplicationContext) serviceContext.getApplicationContext();
+        readerFactory = LookupReaderFactoryProvider.getLookupReaderFactory(serviceContext, configuration);
+        dataParserFactory = (IRecordDataParserFactory<T>) ParserFactoryProvider
+                .getDataParserFactory(appCtx.getLibraryManager(), configuration);
         dataParserFactory.setRecordType(recordType);
-        readerFactory.configure(configuration);
+        readerFactory.configure(serviceContext, configuration);
         dataParserFactory.configure(configuration);
     }
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
index df1b43e..40bc7d8 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
@@ -21,10 +21,10 @@
 import java.io.Serializable;
 import java.util.Map;
 
-import org.apache.asterix.common.library.ILibraryManager;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -70,12 +70,12 @@
     /**
      * Configure the adapter
      *
-     * @param libraryManager
+     * @param serviceContext
      * @param configuration
      * @throws AlgebricksException
      * @throws HyracksDataException
      */
-    void configure(ILibraryManager libraryManager, Map<String, String> configuration)
+    void configure(IServiceContext serviceContext, Map<String, String> configuration)
             throws HyracksDataException, AlgebricksException;
 
     /**
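
Under the revised contract, a factory derives what it needs (for example the library manager) from the service context instead of receiving an ILibraryManager directly; a hedged skeleton of a conforming implementation:

    import java.util.Map;

    import org.apache.asterix.common.api.IApplicationContext;
    import org.apache.asterix.common.library.ILibraryManager;
    import org.apache.hyracks.api.application.IServiceContext;

    // Sketch: resolve cluster services off the service context at configure time.
    abstract class SketchAdapterFactory {
        protected Map<String, String> configuration;

        public void configure(IServiceContext serviceContext, Map<String, String> configuration) {
            this.configuration = configuration;
            IApplicationContext appCtx = (IApplicationContext) serviceContext.getApplicationContext();
            ILibraryManager libraryManager = appCtx.getLibraryManager();
            // ... locate data source and parser factories through libraryManager ...
        }
    }
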
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java
index 5538369..edda448 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java
@@ -24,11 +24,12 @@
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public interface IExternalDataSourceFactory extends Serializable {
@@ -62,7 +63,8 @@
      * @param configuration
      * @throws AsterixException
      */
-    public void configure(Map<String, String> configuration) throws AlgebricksException, HyracksDataException;
+    public void configure(IServiceContext ctx, Map<String, String> configuration)
+            throws AlgebricksException, HyracksDataException;
 
     /**
      * Specify whether the external data source can be indexed
@@ -82,11 +84,11 @@
      * @return
      * @throws AlgebricksException
      */
-    public static AlgebricksAbsolutePartitionConstraint getPartitionConstraints(
+    public static AlgebricksAbsolutePartitionConstraint getPartitionConstraints(IApplicationContext appCtx,
             AlgebricksAbsolutePartitionConstraint constraints, int count) throws AlgebricksException {
         if (constraints == null) {
             ArrayList<String> locs = new ArrayList<>();
-            Set<String> stores = AppContextInfo.INSTANCE.getMetadataProperties().getStores().keySet();
+            Set<String> stores = appCtx.getMetadataProperties().getStores().keySet();
             if (stores.isEmpty()) {
                 throw new AlgebricksException("Configurations don't have any stores");
             }
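
Call sites now thread an application context into this static helper, which falls back to the metadata store locations when no constraint is supplied; a usage sketch (names are illustrative):

    import org.apache.asterix.common.api.IApplicationContext;
    import org.apache.asterix.external.api.IExternalDataSourceFactory;
    import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
    import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;

    // Sketch: request a single intake location and let the helper pick a store node.
    final class Constraints {
        private Constraints() {
        }

        static AlgebricksAbsolutePartitionConstraint singleIntake(IApplicationContext appCtx)
                throws AlgebricksException {
            return IExternalDataSourceFactory.getPartitionConstraints(appCtx, null, 1);
        }
    }
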
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/INodeResolver.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/INodeResolver.java
index b0f1ae7..99ffdf1 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/INodeResolver.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/INodeResolver.java
@@ -18,11 +18,13 @@
  */
 package org.apache.asterix.external.api;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 
 /**
  * A policy for resolving a name to a node controller id.
  */
+@FunctionalInterface
 public interface INodeResolver {
 
     /**
@@ -33,5 +35,5 @@
      * @return resolved result (a node controller id)
      * @throws AsterixException
      */
-    public String resolveNode(String value) throws AsterixException;
+    String resolveNode(ICcApplicationContext appCtx, String value) throws AsterixException;
 }
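
Since INodeResolver is now a @FunctionalInterface, a trivial resolution policy can be written as a lambda; a sketch with an illustrative pass-through policy:

    import org.apache.asterix.external.api.INodeResolver;

    // Sketch: treat the configured value itself as the node controller id.
    final class Resolvers {
        private Resolvers() {
        }

        static final INodeResolver PASS_THROUGH = (appCtx, value) -> value;
    }
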
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedEventsListener.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedEventsListener.java
index f49da3c..6f3b667 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedEventsListener.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedEventsListener.java
@@ -25,15 +25,15 @@
 import java.util.logging.Logger;
 
 import org.apache.asterix.active.ActiveEvent;
-import org.apache.asterix.active.ActiveJobNotificationHandler;
+import org.apache.asterix.active.ActiveLifecycleListener;
 import org.apache.asterix.active.ActivityState;
 import org.apache.asterix.active.EntityId;
 import org.apache.asterix.active.IActiveEventSubscriber;
 import org.apache.asterix.active.message.ActivePartitionMessage;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.metadata.IDataset;
 import org.apache.asterix.external.feed.watch.FeedEventSubscriber;
 import org.apache.asterix.external.feed.watch.NoOpSubscriber;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.job.JobStatus;
@@ -42,11 +42,14 @@
     // constants
     private static final Logger LOGGER = Logger.getLogger(FeedEventsListener.class.getName());
     // members
+    private final ICcApplicationContext appCtx;
     private final String[] sources;
     private final List<IActiveEventSubscriber> subscribers;
     private int numRegistered;
 
-    public FeedEventsListener(EntityId entityId, List<IDataset> datasets, String[] sources) {
+    public FeedEventsListener(ICcApplicationContext appCtx, EntityId entityId, List<IDataset> datasets,
+            String[] sources) {
+        this.appCtx = appCtx;
         this.entityId = entityId;
         this.datasets = datasets;
         this.sources = sources;
@@ -103,10 +106,11 @@
     }
 
     private void finish() throws Exception {
-        IHyracksClientConnection hcc = AppContextInfo.INSTANCE.getHcc();
+        IHyracksClientConnection hcc = appCtx.getHcc();
         JobStatus status = hcc.getJobStatus(jobId);
         state = status.equals(JobStatus.FAILURE) ? ActivityState.FAILED : ActivityState.STOPPED;
-        ActiveJobNotificationHandler.INSTANCE.removeListener(this);
+        ActiveLifecycleListener activeLcListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+        activeLcListener.getNotificationHandler().removeListener(this);
     }
 
     private void start(ActiveEvent event) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/ExternalFileIndexAccessor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/ExternalFileIndexAccessor.java
index a3f277a..cdb40d8 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/ExternalFileIndexAccessor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/ExternalFileIndexAccessor.java
@@ -43,7 +43,6 @@
 import org.apache.hyracks.storage.am.btree.util.BTreeUtils;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeDataflowHelper;
 import org.apache.hyracks.storage.am.lsm.btree.impls.ExternalBTree;
@@ -97,7 +96,7 @@
         fileIndexSearchCursor = fileIndexAccessor.createSearchCursor(false);
     }
 
-    public void lookup(int fileId, ExternalFile file) throws HyracksDataException, IndexException {
+    public void lookup(int fileId, ExternalFile file) throws HyracksDataException {
         // Set search parameters
         currentFileNumber.setValue(fileId);
         searchKeyTupleBuilder.reset();
@@ -130,8 +129,9 @@
                         .getStringValue());
         file.setSize(((AInt64) externalFileRecord.getValueByPos(FilesIndexDescription.EXTERNAL_FILE_SIZE_FIELD_INDEX))
                 .getLongValue());
-        file.setLastModefiedTime(new Date(((ADateTime) externalFileRecord
-                .getValueByPos(FilesIndexDescription.EXTERNAL_FILE_MOD_DATE_FIELD_INDEX)).getChrononTime()));
+        file.setLastModefiedTime(new Date(
+                ((ADateTime) externalFileRecord.getValueByPos(FilesIndexDescription.EXTERNAL_FILE_MOD_DATE_FIELD_INDEX))
+                        .getChrononTime()));
     }
 
     public void close() throws HyracksDataException {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
index 3d1297d..b58feba 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
@@ -23,6 +23,7 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.AsterixInputStream;
 import org.apache.asterix.external.api.IExternalIndexer;
@@ -46,6 +47,8 @@
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.ICCServiceContext;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.hdfs.dataflow.ConfFactory;
@@ -56,6 +59,7 @@
 
     protected static final long serialVersionUID = 1L;
     protected transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+    protected transient IServiceContext serviceCtx;
     protected String[] readSchedule;
     protected boolean read[];
     protected InputSplitsFactory inputSplitsFactory;
@@ -75,10 +79,11 @@
     private Format format;
 
     @Override
-    public void configure(Map<String, String> configuration) throws AsterixException {
+    public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
         try {
-            init();
+            this.serviceCtx = serviceCtx;
             this.configuration = configuration;
+            init((ICCServiceContext) serviceCtx);
             JobConf conf = HDFSUtils.configureHDFSJobConf(configuration);
             confFactory = new ConfFactory(conf);
             clusterLocations = getPartitionConstraint();
@@ -153,7 +158,8 @@
      */
     @Override
     public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
-        clusterLocations = HDFSUtils.getPartitionConstraints(clusterLocations);
+        clusterLocations = HDFSUtils.getPartitionConstraints((IApplicationContext) serviceCtx.getApplicationContext(),
+                clusterLocations);
         return clusterLocations;
     }
 
@@ -161,12 +167,12 @@
-     * This method initialize the scheduler which assigns responsibility of reading different logical input splits from
-     * HDFS
+     * This method initializes the scheduler, which assigns the responsibility of reading different logical input
+     * splits from HDFS
      */
-    private static void init() throws HyracksDataException {
+    private static void init(ICCServiceContext serviceCtx) throws HyracksDataException {
         if (!initialized) {
             synchronized (initLock) {
                 if (!initialized) {
-                    hdfsScheduler = HDFSUtils.initializeHDFSScheduler();
-                    indexingScheduler = HDFSUtils.initializeIndexingHDFSScheduler();
+                    hdfsScheduler = HDFSUtils.initializeHDFSScheduler(serviceCtx);
+                    indexingScheduler = HDFSUtils.initializeIndexingHDFSScheduler(serviceCtx);
                     initialized = true;
                 }
             }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java
index bfcacd8..98f78cc 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.util.Map;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.ILookupReaderFactory;
 import org.apache.asterix.external.api.ILookupRecordReader;
@@ -30,6 +31,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.hdfs.dataflow.ConfFactory;
@@ -40,6 +42,7 @@
     protected ConfFactory confFactory;
     protected Map<String, String> configuration;
     protected transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+    protected transient IServiceContext serviceCtx;
 
     public HDFSLookupReaderFactory() {
     }
@@ -51,12 +54,14 @@
 
     @Override
     public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AsterixException {
-        clusterLocations = HDFSUtils.getPartitionConstraints(clusterLocations);
+        clusterLocations = HDFSUtils.getPartitionConstraints((IApplicationContext) serviceCtx.getApplicationContext(),
+                clusterLocations);
         return clusterLocations;
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws AsterixException {
+    public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
+        this.serviceCtx = serviceCtx;
         this.configuration = configuration;
         JobConf conf = HDFSUtils.configureHDFSJobConf(configuration);
         try {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java
index 2ded3fb..03200a9 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java
@@ -23,12 +23,14 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.api.IRecordReaderFactory;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -37,8 +39,9 @@
 public class RSSRecordReaderFactory implements IRecordReaderFactory<SyndEntryImpl> {
 
     private static final long serialVersionUID = 1L;
-    private final List<String> urls = new ArrayList<String>();
+    private final List<String> urls = new ArrayList<>();
     private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+    private transient IServiceContext serviceContext;
 
     @Override
     public DataSourceType getDataSourceType() {
@@ -48,12 +51,14 @@
     @Override
     public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AlgebricksException {
         int count = urls.size();
-        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, count);
+        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(
+                (IApplicationContext) serviceContext.getApplicationContext(), clusterLocations, count);
         return clusterLocations;
     }
 
     @Override
-    public void configure(Map<String, String> configuration) {
+    public void configure(IServiceContext serviceContext, Map<String, String> configuration) {
+        this.serviceContext = serviceContext;
         String url = configuration.get(ExternalDataConstants.KEY_RSS_URL);
         if (url == null) {
             throw new IllegalArgumentException("no RSS URL provided");
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
index 9b23e38..7d75af8 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
@@ -27,6 +27,7 @@
 import org.apache.asterix.external.provider.StreamRecordReaderProvider.Format;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -57,9 +58,10 @@
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws HyracksDataException, AlgebricksException {
+    public void configure(IServiceContext serviceCtx, Map<String, String> configuration)
+            throws HyracksDataException, AlgebricksException {
         this.configuration = configuration;
-        streamFactory.configure(configuration);
+        streamFactory.configure(serviceCtx, configuration);
         format = StreamRecordReaderProvider.getReaderFormat(configuration);
     }
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
index 4d8be98..6ff0b6c 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
@@ -22,6 +22,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
 import org.apache.asterix.external.api.IRecordReader;
@@ -32,6 +33,7 @@
 import org.apache.asterix.external.util.TwitterUtil.SearchAPIConstants;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -47,6 +49,7 @@
 
     private Map<String, String> configuration;
     private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+    private transient IServiceContext serviceCtx;
 
     public static boolean isTwitterPull(Map<String, String> configuration) {
         String reader = configuration.get(ExternalDataConstants.KEY_READER);
@@ -64,13 +67,16 @@
 
     @Override
     public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AlgebricksException {
-        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, INTAKE_CARDINALITY);
+        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(
+                (IApplicationContext) serviceCtx.getApplicationContext(),
+                clusterLocations, INTAKE_CARDINALITY);
         return clusterLocations;
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws AsterixException {
+    public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
         this.configuration = configuration;
+        this.serviceCtx = serviceCtx;
         TwitterUtil.initializeConfigurationWithAuthInfo(configuration);
         if (!validateConfiguration(configuration)) {
             StringBuilder builder = new StringBuilder();
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
index a2e3704..44b0b43 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
@@ -26,6 +26,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.AsterixInputStream;
 import org.apache.asterix.external.api.IInputStreamFactory;
@@ -37,6 +38,7 @@
 import org.apache.asterix.external.util.FileSystemWatcher;
 import org.apache.asterix.external.util.NodeResolverFactory;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.UnmanagedFileSplit;
@@ -83,10 +85,12 @@
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws AsterixException {
+    public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
         this.configuration = configuration;
         String[] splits = configuration.get(ExternalDataConstants.KEY_PATH).split(",");
-        configureFileSplits(splits);
+        if (inputFileSplits == null) {
+            configureFileSplits((ICcApplicationContext) serviceCtx.getApplicationContext(), splits);
+        }
         configurePartitionConstraint();
         this.isFeed = ExternalDataUtils.isFeed(configuration) && ExternalDataUtils.keepDataSourceOpen(configuration);
         this.expression = configuration.get(ExternalDataConstants.KEY_EXPRESSION);
@@ -97,25 +101,24 @@
         return constraints;
     }
 
-    private void configureFileSplits(String[] splits) throws AsterixException {
+    private void configureFileSplits(ICcApplicationContext appCtx, String[] splits) throws AsterixException {
         INodeResolver resolver = getNodeResolver();
-        if (inputFileSplits == null) {
-            inputFileSplits = new UnmanagedFileSplit[splits.length];
-            String node;
-            String path;
-            int count = 0;
-            String trimmedValue;
-            for (String splitPath : splits) {
-                trimmedValue = splitPath.trim();
-                if (!trimmedValue.contains("://")) {
-                    throw new AsterixException(
-                            "Invalid path: " + splitPath + "\nUsage- path=\"Host://Absolute File Path\"");
-                }
-                node = resolver.resolveNode(trimmedValue.split(":")[0]);
-                path = trimmedValue.split("://")[1];
-                inputFileSplits[count++] = new UnmanagedFileSplit(node, path);
+        inputFileSplits = new UnmanagedFileSplit[splits.length];
+        String node;
+        String path;
+        int count = 0;
+        String trimmedValue;
+        for (String splitPath : splits) {
+            trimmedValue = splitPath.trim();
+            if (!trimmedValue.contains("://")) {
+                throw new AsterixException(
+                        "Invalid path: " + splitPath + "\nUsage- path=\"Host://Absolute File Path\"");
             }
+            node = resolver.resolveNode(appCtx, trimmedValue.split(":")[0]);
+            path = trimmedValue.split("://")[1];
+            inputFileSplits[count++] = new UnmanagedFileSplit(node, path);
         }
+
     }
 
     private void configurePartitionConstraint() throws AsterixException {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketClientInputStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketClientInputStreamFactory.java
index 8ab8ead..9a0e718 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketClientInputStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketClientInputStreamFactory.java
@@ -25,6 +25,7 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.AsterixInputStream;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
@@ -35,25 +36,29 @@
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class SocketClientInputStreamFactory implements IInputStreamFactory {
 
     private static final long serialVersionUID = 1L;
+    private transient IServiceContext serviceCtx;
     private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
     private List<Pair<String, Integer>> sockets;
 
     @Override
     public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AlgebricksException {
-        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, sockets.size());
+        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(
+                (IApplicationContext) serviceCtx.getApplicationContext(), clusterLocations, sockets.size());
         return clusterLocations;
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws AsterixException {
+    public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
         try {
-            this.sockets = new ArrayList<Pair<String, Integer>>();
+            this.serviceCtx = serviceCtx;
+            this.sockets = new ArrayList<>();
             String socketsValue = configuration.get(ExternalDataConstants.KEY_SOCKETS);
             if (socketsValue == null) {
                 throw new IllegalArgumentException(
@@ -66,7 +71,7 @@
                 int port = Integer.parseInt(socketTokens[1].trim());
                 InetAddress[] resolved;
                 resolved = SystemDefaultDnsResolver.INSTANCE.resolve(host);
-                Pair<String, Integer> p = new Pair<String, Integer>(resolved[0].getHostAddress(), port);
+                Pair<String, Integer> p = new Pair<>(resolved[0].getHostAddress(), port);
                 sockets.add(p);
             }
         } catch (UnknownHostException e) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketServerInputStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketServerInputStreamFactory.java
index f8aac81..05931b2 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketServerInputStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketServerInputStreamFactory.java
@@ -29,6 +29,7 @@
 import java.util.Random;
 import java.util.Set;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
@@ -39,8 +40,8 @@
 import org.apache.asterix.runtime.utils.RuntimeUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -56,9 +57,10 @@
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws AlgebricksException {
+    public void configure(IServiceContext serviceCtx, Map<String, String> configuration)
+            throws AsterixException, CompilationException {
         try {
-            sockets = new ArrayList<Pair<String, Integer>>();
+            sockets = new ArrayList<>();
             String modeValue = configuration.get(ExternalDataConstants.KEY_MODE);
             if (modeValue != null) {
                 mode = Mode.valueOf(modeValue.trim().toUpperCase());
@@ -68,8 +70,9 @@
                 throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_NOT_PROPERLY_CONFIGURED);
             }
             Map<InetAddress, Set<String>> ncMap;
-            ncMap = RuntimeUtils.getNodeControllerMap();
-            List<String> ncs = RuntimeUtils.getAllNodeControllers();
+            ncMap = RuntimeUtils.getNodeControllerMap((ICcApplicationContext) serviceCtx.getApplicationContext());
+            List<String> ncs =
+                    RuntimeUtils.getAllNodeControllers((ICcApplicationContext) serviceCtx.getApplicationContext());
             String[] socketsArray = socketsValue.split(",");
             Random random = new Random();
             for (String socket : socketsArray) {
@@ -87,11 +90,11 @@
                         }
                         String[] ncArray = ncsOnIp.toArray(new String[] {});
                         String nc = ncArray[random.nextInt(ncArray.length)];
-                        p = new Pair<String, Integer>(nc, port);
+                        p = new Pair<>(nc, port);
                         break;
 
                     case NC:
-                        p = new Pair<String, Integer>(host, port);
+                        p = new Pair<>(host, port);
                         if (!ncs.contains(host)) {
                             throw new CompilationException(
                                     ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_INVALID_HOST_NC, "NC", host,
@@ -127,7 +130,7 @@
 
     @Override
     public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
-        List<String> locations = new ArrayList<String>();
+        List<String> locations = new ArrayList<>();
         for (Pair<String, Integer> socket : sockets) {
             locations.add(socket.first);
         }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamFactory.java
index 936f1f8..b32006c 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamFactory.java
@@ -28,6 +28,7 @@
 import org.apache.asterix.external.input.stream.TwitterFirehoseInputStream;
 import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -82,7 +83,7 @@
     }
 
     @Override
-    public void configure(Map<String, String> configuration) {
+    public void configure(IServiceContext serviceCtx, Map<String, String> configuration) {
         this.configuration = configuration;
     }
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
index 866fd9c..493a0bf 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
@@ -20,7 +20,7 @@
 
 import java.io.IOException;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.common.library.ILibraryManager;
@@ -32,7 +32,6 @@
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.EnumDeserializer;
 import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
 import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
@@ -56,7 +55,8 @@
     protected final IScalarEvaluator[] argumentEvaluators;
     protected final JavaFunctionHelper functionHelper;
 
-    public ExternalFunction(IExternalFunctionInfo finfo, IScalarEvaluatorFactory args[], IHyracksTaskContext context)
+    public ExternalFunction(IExternalFunctionInfo finfo, IScalarEvaluatorFactory args[], IHyracksTaskContext context,
+            IApplicationContext appCtx)
             throws HyracksDataException {
         this.finfo = finfo;
         this.evaluatorFactories = args;
@@ -69,16 +69,7 @@
         String[] fnameComponents = finfo.getFunctionIdentifier().getName().split("#");
         String functionLibary = fnameComponents[0];
         String dataverse = finfo.getFunctionIdentifier().getNamespace();
-        ILibraryManager libraryManager;
-        if (context == null) {
-            // Gets the library manager for compile-time constant folding.
-            libraryManager = AppContextInfo.INSTANCE.getLibraryManager();
-        } else {
-            // Gets the library manager for real runtime evaluation.
-            IAppRuntimeContext runtimeCtx = (IAppRuntimeContext) context.getJobletContext()
-                    .getServiceContext().getApplicationContext();
-            libraryManager = runtimeCtx.getLibraryManager();
-        }
+        ILibraryManager libraryManager = appCtx.getLibraryManager();
         ClassLoader libraryClassLoader = libraryManager.getLibraryClassLoader(dataverse, functionLibary);
         String classname = finfo.getFunctionBody().trim();
         Class<?> clazz;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java
index 5c27561..2e9e231 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.external.library;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.om.functions.IExternalFunctionInfo;
 import org.apache.asterix.om.functions.IFunctionDescriptor;
 import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
@@ -28,11 +29,11 @@
 
 public class ExternalFunctionDescriptorProvider {
 
-    public static IFunctionDescriptor getExternalFunctionDescriptor(IExternalFunctionInfo finfo)
-            throws AlgebricksException {
+    public static IFunctionDescriptor getExternalFunctionDescriptor(IExternalFunctionInfo finfo,
+            IApplicationContext appCtx) throws AlgebricksException {
         switch (finfo.getKind()) {
             case SCALAR:
-                return new ExternalScalarFunctionDescriptor(finfo);
+                return new ExternalScalarFunctionDescriptor(finfo, appCtx);
             case AGGREGATE:
             case UNNEST:
                 throw new AlgebricksException("Unsupported function kind :" + finfo.getKind());
@@ -48,10 +49,16 @@
     private static final long serialVersionUID = 1L;
     private final IFunctionInfo finfo;
     private IScalarEvaluatorFactory evaluatorFactory;
+    private final transient IApplicationContext appCtx;
+
+    public ExternalScalarFunctionDescriptor(IFunctionInfo finfo, IApplicationContext appCtx) {
+        this.finfo = finfo;
+        this.appCtx = appCtx;
+    }
 
     @Override
     public IScalarEvaluatorFactory createEvaluatorFactory(IScalarEvaluatorFactory[] args) throws AlgebricksException {
-        evaluatorFactory = new ExternalScalarFunctionEvaluatorFactory((IExternalFunctionInfo) finfo, args);
+        evaluatorFactory = new ExternalScalarFunctionEvaluatorFactory((IExternalFunctionInfo) finfo, args, appCtx);
         return evaluatorFactory;
     }
 
@@ -60,8 +67,4 @@
         return finfo.getFunctionIdentifier();
     }
 
-    public ExternalScalarFunctionDescriptor(IFunctionInfo finfo) {
-        this.finfo = finfo;
-    }
-
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
index 0e80e4b..7602b9a 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.external.library;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.external.api.IExternalFunction;
@@ -35,14 +36,14 @@
 public class ExternalFunctionProvider {
 
     public static IExternalFunction getExternalFunctionEvaluator(IExternalFunctionInfo finfo,
-            IScalarEvaluatorFactory args[], IHyracksTaskContext context) throws HyracksDataException {
+            IScalarEvaluatorFactory args[], IHyracksTaskContext context, IApplicationContext appCtx)
+            throws HyracksDataException {
         switch (finfo.getKind()) {
             case SCALAR:
-                return new ExternalScalarFunction(finfo, args, context);
+                return new ExternalScalarFunction(finfo, args, context, appCtx);
             case AGGREGATE:
             case UNNEST:
-                throw new RuntimeDataException(ErrorCode.LIBRARY_EXTERNAL_FUNCTION_UNSUPPORTED_KIND,
-                        finfo.getKind());
+                throw new RuntimeDataException(ErrorCode.LIBRARY_EXTERNAL_FUNCTION_UNSUPPORTED_KIND, finfo.getKind());
             default:
                 throw new RuntimeDataException(ErrorCode.LIBRARY_EXTERNAL_FUNCTION_UNKNOWN_KIND, finfo.getKind());
         }
@@ -52,8 +53,8 @@
 class ExternalScalarFunction extends ExternalFunction implements IExternalScalarFunction, IScalarEvaluator {
 
     public ExternalScalarFunction(IExternalFunctionInfo finfo, IScalarEvaluatorFactory args[],
-            IHyracksTaskContext context) throws HyracksDataException {
-        super(finfo, args, context);
+            IHyracksTaskContext context, IApplicationContext appCtx) throws HyracksDataException {
+        super(finfo, args, context, appCtx);
         try {
             initialize(functionHelper);
         } catch (Exception e) {
@@ -78,11 +79,11 @@
         try {
             resultBuffer.reset();
             ((IExternalScalarFunction) externalFunction).evaluate(argumentProvider);
-        /*
-         * Make sure that if "setResult" is not called,
-         * or the result object is missing we let Hyracks storage manager know
-         * we want to discard a missing object
-         */
+            /*
+             * Make sure that if "setResult" is not called,
+             * or the result object is missing we let Hyracks storage manager know
+             * we want to discard a missing object
+             */
             byte byteOutput = resultBuffer.getByteArray()[0];
             if (!argumentProvider.isValidResult() || byteOutput == ATypeTag.SERIALIZED_MISSING_TYPE_TAG) {
                 resultBuffer.getDataOutput().writeByte(ATypeTag.SERIALIZED_MISSING_TYPE_TAG);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalScalarFunctionEvaluatorFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalScalarFunctionEvaluatorFactory.java
index 99d40c7..8208013 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalScalarFunctionEvaluatorFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalScalarFunctionEvaluatorFactory.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.external.library;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.om.functions.IExternalFunctionInfo;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
@@ -30,16 +31,21 @@
     private static final long serialVersionUID = 1L;
     private final IExternalFunctionInfo finfo;
     private final IScalarEvaluatorFactory[] args;
+    private final transient IApplicationContext appCtx;
 
-    public ExternalScalarFunctionEvaluatorFactory(IExternalFunctionInfo finfo, IScalarEvaluatorFactory[] args)
-            throws AlgebricksException {
+    public ExternalScalarFunctionEvaluatorFactory(IExternalFunctionInfo finfo, IScalarEvaluatorFactory[] args,
+            IApplicationContext appCtx) throws AlgebricksException {
         this.finfo = finfo;
         this.args = args;
+        this.appCtx = appCtx;
     }
 
     @Override
     public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
-        return (ExternalScalarFunction) ExternalFunctionProvider.getExternalFunctionEvaluator(finfo, args, ctx);
+        return (ExternalScalarFunction) ExternalFunctionProvider.getExternalFunctionEvaluator(finfo, args, ctx,
+                appCtx == null
+                        ? (IApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext()
+                        : appCtx);
     }
 
 }
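
The factory above is serialized and shipped to node controllers, so appCtx is transient and arrives as null after deserialization; createScalarEvaluator then falls back to the application context reachable from the task. That resolution step in isolation, as a hypothetical sketch:

    import org.apache.asterix.common.api.IApplicationContext;
    import org.apache.hyracks.api.context.IHyracksTaskContext;

    public class AppCtxResolver implements java.io.Serializable {
        private static final long serialVersionUID = 1L;
        // Not serialized: null on a remote node after deserialization.
        private final transient IApplicationContext appCtx;

        public AppCtxResolver(IApplicationContext appCtx) {
            this.appCtx = appCtx;
        }

        public IApplicationContext resolve(IHyracksTaskContext ctx) {
            return appCtx != null ? appCtx
                    : (IApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
        }
    }
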
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/RuntimeExternalFunctionUtil.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/RuntimeExternalFunctionUtil.java
index 33d508e..8cd157c 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/RuntimeExternalFunctionUtil.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/RuntimeExternalFunctionUtil.java
@@ -22,6 +22,7 @@
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.om.base.AMutableInt32;
@@ -35,7 +36,7 @@
 
 public class RuntimeExternalFunctionUtil {
 
-    private static Map<String, ClassLoader> libraryClassLoaders = new HashMap<String, ClassLoader>();
+    private static Map<String, ClassLoader> libraryClassLoaders = new HashMap<>();
 
     public static void registerLibraryClassLoader(String dataverseName, String libraryName, ClassLoader classLoader)
             throws RuntimeDataException {
@@ -55,10 +56,11 @@
         }
     }
 
-    public static IFunctionDescriptor getFunctionDescriptor(IFunctionInfo finfo) throws RuntimeDataException {
+    public static IFunctionDescriptor getFunctionDescriptor(IFunctionInfo finfo, IApplicationContext appCtx)
+            throws RuntimeDataException {
         switch (((IExternalFunctionInfo) finfo).getKind()) {
             case SCALAR:
-                return getScalarFunctionDescriptor(finfo);
+                return getScalarFunctionDescriptor(finfo, appCtx);
             case AGGREGATE:
             case UNNEST:
             case STATEFUL:
@@ -68,8 +70,9 @@
         return null;
     }
 
-    private static AbstractScalarFunctionDynamicDescriptor getScalarFunctionDescriptor(IFunctionInfo finfo) {
-        return new ExternalScalarFunctionDescriptor(finfo);
+    private static AbstractScalarFunctionDynamicDescriptor getScalarFunctionDescriptor(IFunctionInfo finfo,
+            IApplicationContext appCtx) {
+        return new ExternalScalarFunctionDescriptor(finfo, appCtx);
     }
 
     public static ByteBuffer allocateArgumentBuffers(IAType type) {
@@ -91,15 +94,15 @@
                 return new AMutableString("");
             default:
                 return null;
-                /*
-                ARecordType recordType = (ARecordType) type;
-                IAType[] fieldTypes = recordType.getFieldTypes();
-                IAObject[] fields = new IAObject[fieldTypes.length];
-                for (int i = 0; i < fields.length; i++) {
-                    fields[i] = allocateArgumentObjects(fieldTypes[i]);
-                }
-                return new AMutableRecord((ARecordType) type, fields);
-                */
+            /*
+            ARecordType recordType = (ARecordType) type;
+            IAType[] fieldTypes = recordType.getFieldTypes();
+            IAObject[] fields = new IAObject[fieldTypes.length];
+            for (int i = 0; i < fields.length; i++) {
+                fields[i] = allocateArgumentObjects(fieldTypes[i]);
+            }
+            return new AMutableRecord((ARecordType) type, fields);
+            */
         }
     }
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
index 2ddb646..1559469 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
@@ -26,7 +26,6 @@
 import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.IIndexDataflowHelper;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.util.IndexFileNameUtil;
 import org.apache.hyracks.storage.am.lsm.common.api.ITwoPCIndex;
@@ -41,28 +40,23 @@
             IIndexDataflowHelperFactory filesIndexDataflowHelperFactory, IndexInfoOperatorDescriptor fileIndexesInfo,
             List<IIndexDataflowHelperFactory> indexesDataflowHelperFactories,
             List<IndexInfoOperatorDescriptor> indexesInfos) {
-        super(spec, filesIndexDataflowHelperFactory, fileIndexesInfo, indexesDataflowHelperFactories,
-                indexesInfos);
+        super(spec, filesIndexDataflowHelperFactory, fileIndexesInfo, indexesDataflowHelperFactories, indexesInfos);
     }
 
     @Override
     protected void performOpOnIndex(IIndexDataflowHelperFactory indexDataflowHelperFactory, IHyracksTaskContext ctx,
-            IndexInfoOperatorDescriptor fileIndexInfo, int partition) {
-        try {
-            FileReference resourcePath =
-                    IndexFileNameUtil.getIndexAbsoluteFileRef(fileIndexInfo, partition, ctx.getIOManager());
-            LOGGER.warn("performing the operation on " + resourcePath.getFile().getAbsolutePath());
-            // Get DataflowHelper
-            IIndexDataflowHelper indexHelper =
-                    indexDataflowHelperFactory.createIndexDataflowHelper(fileIndexInfo, ctx, partition);
-            // Get index
-            IIndex index = indexHelper.getIndexInstance();
-            // commit transaction
-            ((ITwoPCIndex) index).commitTransaction();
-            LOGGER.warn("operation on " + resourecePath.getFile().getAbsolutePath() + " Succeded");
-        } catch (HyracksDataException | IndexException e) {
-            throw new IllegalStateException(e);
-        }
+            IndexInfoOperatorDescriptor fileIndexInfo, int partition) throws HyracksDataException {
+        FileReference resourcePath =
+                IndexFileNameUtil.getIndexAbsoluteFileRef(fileIndexInfo, partition, ctx.getIOManager());
+        LOGGER.warn("performing the operation on " + resourcePath.getFile().getAbsolutePath());
+        // Get DataflowHelper
+        IIndexDataflowHelper indexHelper =
+                indexDataflowHelperFactory.createIndexDataflowHelper(fileIndexInfo, ctx, partition);
+        // Get index
+        IIndex index = indexHelper.getIndexInstance();
+        // commit transaction
+        ((ITwoPCIndex) index).commitTransaction();
+        LOGGER.warn("operation on " + resourecePath.getFile().getAbsolutePath() + " Succeded");
     }
 
 }
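
performOpOnIndex now declares throws HyracksDataException instead of catching and wrapping in IllegalStateException, so index failures travel Hyracks' normal error path. Reduced to its essentials (hypothetical method name, types as imported above):

    // The checked exception propagates; no unchecked wrapper is introduced.
    protected void commitIndex(IIndexDataflowHelper indexHelper) throws HyracksDataException {
        IIndex index = indexHelper.getIndexInstance();
        ((ITwoPCIndex) index).commitTransaction();
    }
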
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalFilesIndexOperatorDescriptor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalFilesIndexOperatorDescriptor.java
index 2bb986d..556fb51 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalFilesIndexOperatorDescriptor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalFilesIndexOperatorDescriptor.java
@@ -18,7 +18,6 @@
  */
 package org.apache.asterix.external.operators;
 
-import java.io.IOException;
 import java.util.List;
 
 import org.apache.asterix.external.indexing.ExternalFile;
@@ -39,7 +38,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndexDataflowHelper;
 import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
@@ -61,9 +59,9 @@
     private boolean createNewIndex;
     private List<ExternalFile> files;
 
-    public ExternalFilesIndexOperatorDescriptor(IOperatorDescriptorRegistry spec,
-            IStorageManager storageManager, IIndexLifecycleManagerProvider lifecycleManagerProvider,
-            IFileSplitProvider fileSplitProvider, IIndexDataflowHelperFactory dataflowHelperFactory,
+    public ExternalFilesIndexOperatorDescriptor(IOperatorDescriptorRegistry spec, IStorageManager storageManager,
+            IIndexLifecycleManagerProvider lifecycleManagerProvider, IFileSplitProvider fileSplitProvider,
+            IIndexDataflowHelperFactory dataflowHelperFactory,
             ILocalResourceFactoryProvider localResourceFactoryProvider, List<ExternalFile> files,
             boolean createNewIndex, IMetadataPageManagerFactory metadataPageManagerFactory) {
         super(spec, 0, 0, null, storageManager, lifecycleManagerProvider, fileSplitProvider,
@@ -79,8 +77,8 @@
     @Override
     public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
             IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
-        final IIndexDataflowHelper indexHelper = getIndexDataflowHelperFactory().createIndexDataflowHelper(this, ctx,
-                partition);
+        final IIndexDataflowHelper indexHelper =
+                getIndexDataflowHelperFactory().createIndexDataflowHelper(this, ctx, partition);
         return new AbstractOperatorNodePushable() {
 
             @SuppressWarnings("incomplete-switch")
@@ -95,15 +93,13 @@
                     try {
                         IIndex index = indexHelper.getIndexInstance();
                         // Create bulk loader
-                        IIndexBulkLoader bulkLoader = index.createBulkLoader(BTree.DEFAULT_FILL_FACTOR, false,
-                                files.size(), false);
+                        IIndexBulkLoader bulkLoader =
+                                index.createBulkLoader(BTree.DEFAULT_FILL_FACTOR, false, files.size(), false);
                         // Load files
                         for (ExternalFile file : files) {
                             bulkLoader.add(filesTupleTranslator.getTupleFromFile(file));
                         }
                         bulkLoader.end();
-                    } catch (IndexException | IOException e) {
-                        throw new HyracksDataException(e);
                     } finally {
                         indexHelper.close();
                     }
@@ -114,8 +110,8 @@
                     IIndex index = indexHelper.getIndexInstance();
                     LSMTwoPCBTreeBulkLoader bulkLoader = null;
                     try {
-                        bulkLoader = (LSMTwoPCBTreeBulkLoader) ((ExternalBTree) index).createTransactionBulkLoader(
-                                BTree.DEFAULT_FILL_FACTOR, false, files.size(), false);
+                        bulkLoader = (LSMTwoPCBTreeBulkLoader) ((ExternalBTree) index)
+                                .createTransactionBulkLoader(BTree.DEFAULT_FILL_FACTOR, false, files.size(), false);
                         // Load files
                         // The files must be ordered according to their numbers
                         for (ExternalFile file : files) {
@@ -130,11 +126,11 @@
                             }
                         }
                         bulkLoader.end();
-                    } catch (IndexException | IOException e) {
+                    } catch (Exception e) {
                         if (bulkLoader != null) {
                             bulkLoader.abort();
                         }
-                        throw new HyracksDataException(e);
+                        throw HyracksDataException.create(e);
                     } finally {
                         indexHelper.close();
                     }
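
The transactional branch above widens the catch to Exception, aborts the partially written bulk load, and rethrows via HyracksDataException.create, which preserves the original error when the cause already is a HyracksDataException. The shape of the pattern, with tupleFor as a hypothetical helper:

    try {
        for (ExternalFile file : files) {
            bulkLoader.add(tupleFor(file)); // hypothetical tuple construction
        }
        bulkLoader.end();
    } catch (Exception e) {
        if (bulkLoader != null) {
            bulkLoader.abort(); // discard the partial transaction before rethrowing
        }
        throw HyracksDataException.create(e);
    }
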
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalIndexBulkModifyOperatorNodePushable.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalIndexBulkModifyOperatorNodePushable.java
index c95a4a7..d3bd2bb 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalIndexBulkModifyOperatorNodePushable.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalIndexBulkModifyOperatorNodePushable.java
@@ -30,7 +30,6 @@
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
 import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import org.apache.hyracks.storage.am.common.api.ITwoPCIndexBulkLoader;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IndexBulkLoadOperatorNodePushable;
 import org.apache.hyracks.storage.am.lsm.common.api.ITwoPCIndex;
 
@@ -38,8 +37,8 @@
 
     private final FilesIndexDescription filesIndexDescription = new FilesIndexDescription();
     private final int[] deletedFiles;
-    private ArrayTupleBuilder buddyBTreeTupleBuilder = new ArrayTupleBuilder(
-            filesIndexDescription.FILE_BUDDY_BTREE_RECORD_DESCRIPTOR.getFieldCount());
+    private ArrayTupleBuilder buddyBTreeTupleBuilder =
+            new ArrayTupleBuilder(filesIndexDescription.FILE_BUDDY_BTREE_RECORD_DESCRIPTOR.getFieldCount());
     private AMutableInt32 fileNumber = new AMutableInt32(0);
     private ArrayTupleReference deleteTuple = new ArrayTupleReference();
 
@@ -81,11 +80,7 @@
         int tupleCount = accessor.getTupleCount();
         for (int i = 0; i < tupleCount; i++) {
             tuple.reset(accessor, i);
-            try {
-                bulkLoader.add(tuple);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            bulkLoader.add(tuple);
         }
     }
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorNodePushable.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorNodePushable.java
index f289361..121d262 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorNodePushable.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorNodePushable.java
@@ -23,7 +23,7 @@
 
 import org.apache.asterix.active.ActiveManager;
 import org.apache.asterix.active.ActiveRuntimeId;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
 import org.apache.asterix.external.feed.dataflow.SyncFeedRuntimeInputHandler;
 import org.apache.asterix.external.feed.management.FeedConnectionId;
@@ -51,7 +51,7 @@
         this.partition = partition;
         this.connectionId = feedConnectionId;
         this.policyAccessor = new FeedPolicyAccessor(feedPolicy);
-        this.activeManager = (ActiveManager) ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext()
+        this.activeManager = (ActiveManager) ((INcApplicationContext) ctx.getJobletContext().getServiceContext()
                 .getApplicationContext()).getActiveManager();
     }
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
index b911bf1..1dce8ee 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
@@ -22,7 +22,7 @@
 import java.util.logging.Logger;
 
 import org.apache.asterix.active.EntityId;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.common.library.ILibraryManager;
@@ -102,22 +102,22 @@
 
     private IAdapterFactory createExternalAdapterFactory(IHyracksTaskContext ctx) throws HyracksDataException {
         IAdapterFactory adapterFactory;
-        IAppRuntimeContext runtimeCtx = (IAppRuntimeContext) ctx.getJobletContext()
-                .getServiceContext().getApplicationContext();
+        INcApplicationContext runtimeCtx =
+                (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
         ILibraryManager libraryManager = runtimeCtx.getLibraryManager();
         ClassLoader classLoader = libraryManager.getLibraryClassLoader(feedId.getDataverse(), adaptorLibraryName);
         if (classLoader != null) {
             try {
                 adapterFactory = (IAdapterFactory) (classLoader.loadClass(adaptorFactoryClassName).newInstance());
                 adapterFactory.setOutputType(adapterOutputType);
-                adapterFactory.configure(libraryManager, adaptorConfiguration);
+                adapterFactory.configure(ctx.getJobletContext().getServiceContext(), adaptorConfiguration);
             } catch (Exception e) {
                 throw new HyracksDataException(e);
             }
         } else {
             RuntimeDataException err = new RuntimeDataException(
-                    ErrorCode.OPERATORS_FEED_INTAKE_OPERATOR_DESCRIPTOR_CLASSLOADER_NOT_CONFIGURED,
-                    adaptorLibraryName, feedId.getDataverse());
+                    ErrorCode.OPERATORS_FEED_INTAKE_OPERATOR_DESCRIPTOR_CLASSLOADER_NOT_CONFIGURED, adaptorLibraryName,
+                    feedId.getDataverse());
             LOGGER.severe(err.getMessage());
             throw err;
         }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
index 6732b15..6acaefb 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
@@ -25,7 +25,7 @@
 
 import org.apache.asterix.active.ActiveManager;
 import org.apache.asterix.active.ActiveRuntimeId;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
 import org.apache.asterix.external.feed.dataflow.SyncFeedRuntimeInputHandler;
 import org.apache.asterix.external.feed.management.FeedConnectionId;
@@ -103,7 +103,7 @@
         this.policyAccessor = new FeedPolicyAccessor(feedPolicyProperties);
         this.partition = partition;
         this.connectionId = feedConnectionId;
-        this.feedManager = (ActiveManager) ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext()
+        this.feedManager = (ActiveManager) ((INcApplicationContext) ctx.getJobletContext().getServiceContext()
                 .getApplicationContext()).getActiveManager();
         this.message = new VSizeFrame(ctx);
         TaskUtil.putInSharedMap(HyracksConstants.KEY_MESSAGE, message, ctx);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
index f2193af..87b92c2 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
@@ -25,7 +25,7 @@
 
 import org.apache.asterix.active.ActiveManager;
 import org.apache.asterix.active.ActiveRuntimeId;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.dataflow.LSMInsertDeleteOperatorNodePushable;
 import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
 import org.apache.asterix.external.feed.dataflow.SyncFeedRuntimeInputHandler;
@@ -97,7 +97,7 @@
         this.policyAccessor = new FeedPolicyAccessor(feedPolicyProperties);
         this.partition = partition;
         this.connectionId = feedConnectionId;
-        this.feedManager = (ActiveManager) ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext()
+        this.feedManager = (ActiveManager) ((INcApplicationContext) ctx.getJobletContext().getServiceContext()
                 .getApplicationContext()).getActiveManager();
         this.targetId = targetId;
         this.message = new VSizeFrame(ctx);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
index 059de63..d6ac5d1 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
@@ -21,7 +21,6 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.asterix.common.library.ILibraryManager;
 import org.apache.asterix.external.adapter.factory.GenericAdapterFactory;
 import org.apache.asterix.external.adapter.factory.LookupAdapterFactory;
 import org.apache.asterix.external.api.IAdapterFactory;
@@ -30,6 +29,7 @@
 import org.apache.asterix.external.util.ExternalDataCompatibilityUtils;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.dataflow.value.IMissingWriterFactory;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -39,19 +39,19 @@
 public class AdapterFactoryProvider {
 
     // Adapters
-    public static IAdapterFactory getAdapterFactory(ILibraryManager libraryManager, String adapterName,
+    public static IAdapterFactory getAdapterFactory(IServiceContext serviceCtx, String adapterName,
             Map<String, String> configuration, ARecordType itemType, ARecordType metaType)
             throws HyracksDataException, AlgebricksException {
         ExternalDataCompatibilityUtils.prepare(adapterName, configuration);
         GenericAdapterFactory adapterFactory = new GenericAdapterFactory();
         adapterFactory.setOutputType(itemType);
         adapterFactory.setMetaType(metaType);
-        adapterFactory.configure(libraryManager, configuration);
+        adapterFactory.configure(serviceCtx, configuration);
         return adapterFactory;
     }
 
     // Indexing Adapters
-    public static IIndexingAdapterFactory getIndexingAdapterFactory(ILibraryManager libraryManager, String adapterName,
+    public static IIndexingAdapterFactory getIndexingAdapterFactory(IServiceContext serviceCtx, String adapterName,
             Map<String, String> configuration, ARecordType itemType, List<ExternalFile> snapshot, boolean indexingOp,
             ARecordType metaType) throws HyracksDataException, AlgebricksException {
         ExternalDataCompatibilityUtils.prepare(adapterName, configuration);
@@ -59,18 +59,18 @@
         adapterFactory.setOutputType(itemType);
         adapterFactory.setMetaType(metaType);
         adapterFactory.setSnapshot(snapshot, indexingOp);
-        adapterFactory.configure(libraryManager, configuration);
+        adapterFactory.configure(serviceCtx, configuration);
         return adapterFactory;
     }
 
     // Lookup Adapters
-    public static LookupAdapterFactory<?> getLookupAdapterFactory(ILibraryManager libraryManager,
+    public static LookupAdapterFactory<?> getLookupAdapterFactory(IServiceContext serviceCtx,
             Map<String, String> configuration, ARecordType recordType, int[] ridFields, boolean retainInput,
             boolean retainMissing, IMissingWriterFactory missingWriterFactory)
             throws HyracksDataException, AlgebricksException {
         LookupAdapterFactory<?> adapterFactory = new LookupAdapterFactory<>(recordType, ridFields, retainInput,
                 retainMissing, missingWriterFactory);
-        adapterFactory.configure(libraryManager, configuration);
+        adapterFactory.configure(serviceCtx, configuration);
         return adapterFactory;
     }
 }
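
All three factory entry points now take an IServiceContext and hand it to configure, replacing the explicit ILibraryManager parameter; context-dependent services are derived inside the factory when needed. A minimal sketch of a factory honoring that contract (hypothetical class name):

    import java.util.Map;

    import org.apache.asterix.common.api.IApplicationContext;
    import org.apache.asterix.common.library.ILibraryManager;
    import org.apache.hyracks.api.application.IServiceContext;

    public class SketchAdapterFactory {
        private transient IServiceContext serviceCtx;
        private Map<String, String> configuration;

        public void configure(IServiceContext serviceCtx, Map<String, String> configuration) {
            this.serviceCtx = serviceCtx;
            this.configuration = configuration;
        }

        private ILibraryManager libraryManager() {
            // Derived on demand instead of being threaded through every caller.
            return ((IApplicationContext) serviceCtx.getApplicationContext()).getLibraryManager();
        }
    }
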
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java
index a131ae7..69f619f 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java
@@ -25,18 +25,19 @@
 import org.apache.asterix.external.input.record.reader.hdfs.HDFSLookupReaderFactory;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.HDFSUtils;
+import org.apache.hyracks.api.application.IServiceContext;
 
 public class LookupReaderFactoryProvider {
 
     @SuppressWarnings("rawtypes")
-    public static ILookupReaderFactory getLookupReaderFactory(Map<String, String> configuration)
-            throws AsterixException {
+    public static ILookupReaderFactory getLookupReaderFactory(IServiceContext serviceCtx,
+            Map<String, String> configuration) throws AsterixException {
         String inputFormat = HDFSUtils.getInputFormatClassName(configuration);
         if (inputFormat.equals(ExternalDataConstants.CLASS_NAME_TEXT_INPUT_FORMAT)
                 || inputFormat.equals(ExternalDataConstants.CLASS_NAME_SEQUENCE_INPUT_FORMAT)
                 || inputFormat.equals(ExternalDataConstants.CLASS_NAME_RC_INPUT_FORMAT)) {
-            HDFSLookupReaderFactory<Object> readerFactory = new HDFSLookupReaderFactory<Object>();
-            readerFactory.configure(configuration);
+            HDFSLookupReaderFactory<Object> readerFactory = new HDFSLookupReaderFactory<>();
+            readerFactory.configure(serviceCtx, configuration);
             return readerFactory;
         } else {
             throw new AsterixException("Unrecognized external format");
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
index def3ee2..b4353e7 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
@@ -24,6 +24,7 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
@@ -31,19 +32,19 @@
 import org.apache.asterix.external.indexing.IndexingScheduler;
 import org.apache.asterix.external.indexing.RecordId.RecordIdType;
 import org.apache.asterix.external.input.stream.HDFSInputStream;
+import org.apache.asterix.hivecompat.io.RCFileInputFormat;
+import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.asterix.hivecompat.io.RCFileInputFormat;
-import org.apache.asterix.runtime.utils.AppContextInfo;
-import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.ICCServiceContext;
 import org.apache.hyracks.api.context.ICCContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.exceptions.HyracksException;
@@ -51,8 +52,8 @@
 
 public class HDFSUtils {
 
-    public static Scheduler initializeHDFSScheduler() throws  HyracksDataException {
-        ICCContext ccContext = AppContextInfo.INSTANCE.getCCServiceContext().getCCContext();
+    public static Scheduler initializeHDFSScheduler(ICCServiceContext serviceCtx) throws HyracksDataException {
+        ICCContext ccContext = serviceCtx.getCCContext();
         Scheduler scheduler = null;
         try {
             scheduler = new Scheduler(ccContext.getClusterControllerInfo().getClientNetAddress(),
@@ -63,8 +64,9 @@
         return scheduler;
     }
 
-    public static IndexingScheduler initializeIndexingHDFSScheduler() throws HyracksDataException {
-        ICCContext ccContext = AppContextInfo.INSTANCE.getCCServiceContext().getCCContext();
+    public static IndexingScheduler initializeIndexingHDFSScheduler(ICCServiceContext serviceCtx)
+            throws HyracksDataException {
+        ICCContext ccContext = serviceCtx.getCCContext();
         IndexingScheduler scheduler = null;
         try {
             scheduler = new IndexingScheduler(ccContext.getClusterControllerInfo().getClientNetAddress(),
@@ -90,8 +92,8 @@
     public static InputSplit[] getSplits(JobConf conf, List<ExternalFile> files) throws IOException {
         // Create file system object
         FileSystem fs = FileSystem.get(conf);
-        ArrayList<FileSplit> fileSplits = new ArrayList<FileSplit>();
-        ArrayList<ExternalFile> orderedExternalFiles = new ArrayList<ExternalFile>();
+        ArrayList<FileSplit> fileSplits = new ArrayList<>();
+        ArrayList<ExternalFile> orderedExternalFiles = new ArrayList<>();
         // Create files splits
         for (ExternalFile file : files) {
             Path filePath = new Path(file.getFileName());
@@ -202,11 +204,11 @@
         return conf;
     }
 
-    public static AlgebricksAbsolutePartitionConstraint getPartitionConstraints(
+    public static AlgebricksAbsolutePartitionConstraint getPartitionConstraints(IApplicationContext appCtx,
             AlgebricksAbsolutePartitionConstraint clusterLocations) {
         if (clusterLocations == null) {
             ArrayList<String> locs = new ArrayList<>();
-            Map<String, String[]> stores = AppContextInfo.INSTANCE.getMetadataProperties().getStores();
+            Map<String, String[]> stores = appCtx.getMetadataProperties().getStores();
             for (String node : stores.keySet()) {
                 int numIODevices = ClusterStateManager.INSTANCE.getIODevices(node).length;
                 for (int k = 0; k < numIODevices; k++) {
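
HDFSUtils now receives its contexts from callers (initializeHDFSScheduler takes an ICCServiceContext, getPartitionConstraints an IApplicationContext) instead of reading AppContextInfo.INSTANCE. A hypothetical cluster-controller call site under the new signatures, where ccServiceCtx and appCtx are whatever contexts the caller already holds:

    Scheduler scheduler = HDFSUtils.initializeHDFSScheduler(ccServiceCtx);
    AlgebricksAbsolutePartitionConstraint constraints =
            HDFSUtils.getPartitionConstraints(appCtx, null); // null: derive from the stores map
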
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/IdentityResolver.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/IdentityResolver.java
index bda5f1e..9a4ddff 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/IdentityResolver.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/IdentityResolver.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.external.util;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.external.api.INodeResolver;
 
 /**
@@ -27,7 +28,7 @@
 public class IdentityResolver implements INodeResolver {
 
     @Override
-    public String resolveNode(String value) {
+    public String resolveNode(ICcApplicationContext appCtx, String value) {
         return value;
     }
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/NodeResolver.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/NodeResolver.java
index 84346c4..5b15e9e 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/NodeResolver.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/NodeResolver.java
@@ -27,6 +27,7 @@
 import java.util.Random;
 import java.util.Set;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.external.api.INodeResolver;
@@ -42,15 +43,15 @@
     private static final Set<String> ncs = new HashSet<>();
 
     @Override
-    public String resolveNode(String value) throws AsterixException {
+    public String resolveNode(ICcApplicationContext appCtx, String value) throws AsterixException {
         try {
             if (ncMap.isEmpty()) {
-                NodeResolver.updateNCs();
+                NodeResolver.updateNCs(appCtx);
             }
             if (ncs.contains(value)) {
                 return value;
             } else {
-                NodeResolver.updateNCs();
+                NodeResolver.updateNCs(appCtx);
                 if (ncs.contains(value)) {
                     return value;
                 }
@@ -71,10 +72,10 @@
         }
     }
 
-    private static void updateNCs() throws Exception {
+    private static void updateNCs(ICcApplicationContext appCtx) throws Exception {
         synchronized (ncMap) {
             ncMap.clear();
-            RuntimeUtils.getNodeControllerMap(ncMap);
+            RuntimeUtils.getNodeControllerMap(appCtx, ncMap);
             synchronized (ncs) {
                 ncs.clear();
                 for (Entry<InetAddress, Set<String>> entry : ncMap.entrySet()) {
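
resolveNode above populates the NC map lazily and refreshes it at most once more when a value is unknown, now passing the ICcApplicationContext down to updateNCs. The lookup logic in isolation (hypothetical sketch; the real method raises AsterixException with an error code):

    String resolve(ICcApplicationContext appCtx, String value) throws Exception {
        if (ncMap.isEmpty()) {
            updateNCs(appCtx); // first lazy population from the cluster controller
        }
        if (ncs.contains(value)) {
            return value;
        }
        updateNCs(appCtx); // one refresh in case cluster membership changed
        if (ncs.contains(value)) {
            return value;
        }
        throw new IllegalArgumentException("cannot resolve node: " + value);
    }
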
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/RecordWithPKTestReaderFactory.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/RecordWithPKTestReaderFactory.java
index 3467411..12652f3 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/RecordWithPKTestReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/RecordWithPKTestReaderFactory.java
@@ -20,27 +20,32 @@
 
 import java.util.Map;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.api.IRecordReaderFactory;
 import org.apache.asterix.external.input.record.RecordWithPK;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 
 public class RecordWithPKTestReaderFactory implements IRecordReaderFactory<RecordWithPK<char[]>> {
 
     private static final long serialVersionUID = 1L;
     private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+    private transient IServiceContext serviceCtx;
 
     @Override
     public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AlgebricksException {
-        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, 1);
+        clusterLocations = IExternalDataSourceFactory
+                .getPartitionConstraints((IApplicationContext) serviceCtx.getApplicationContext(), clusterLocations, 1);
         return clusterLocations;
     }
 
     @Override
-    public void configure(final Map<String, String> configuration) {
+    public void configure(IServiceContext serviceCtx, final Map<String, String> configuration) {
+        this.serviceCtx = serviceCtx;
     }
 
     @Override
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/kv/KVTestReaderFactory.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/kv/KVTestReaderFactory.java
index 98105b2..d2e0281 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/kv/KVTestReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/kv/KVTestReaderFactory.java
@@ -24,6 +24,7 @@
 import org.apache.asterix.external.api.IRecordReaderFactory;
 import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 
 import com.couchbase.client.core.message.dcp.DCPRequest;
@@ -48,7 +49,7 @@
     }
 
     @Override
-    public void configure(final Map<String, String> configuration) {
+    public void configure(IServiceContext serviceCtx, final Map<String, String> configuration) {
         if (configuration.containsKey("num-of-records")) {
             numOfRecords = Integer.parseInt(configuration.get("num-of-records"));
         }
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
index a108908..1c28940 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
@@ -22,9 +22,8 @@
 import java.io.InputStream;
 import java.util.Map;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.cluster.ClusterPartition;
-import org.apache.asterix.common.config.IPropertiesProvider;
-import org.apache.asterix.common.library.ILibraryManager;
 import org.apache.asterix.external.api.IAdapterFactory;
 import org.apache.asterix.external.api.IDataSourceAdapter;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
@@ -36,6 +35,7 @@
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -55,6 +55,8 @@
 
     private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
 
+    private transient IServiceContext serviceContext;
+
     @Override
     public String getAlias() {
         return "test_typed";
@@ -62,7 +64,8 @@
 
     @Override
     public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AlgebricksException {
-        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, 1);
+        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(
+                (IApplicationContext) serviceContext.getApplicationContext(), clusterLocations, 1);
         return clusterLocations;
     }
 
@@ -77,10 +80,9 @@
                 ADMDataParser parser;
                 ITupleForwarder forwarder;
                 ArrayTupleBuilder tb;
-                IPropertiesProvider propertiesProvider =
-                        (IPropertiesProvider) ctx.getJobletContext().getServiceContext().getApplicationContext();
-                ClusterPartition nodePartition =
-                        propertiesProvider.getMetadataProperties().getNodePartitions().get(nodeId)[0];
+                IApplicationContext appCtx =
+                        (IApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+                ClusterPartition nodePartition = appCtx.getMetadataProperties().getNodePartitions().get(nodeId)[0];
                 parser = new ADMDataParser(outputType, true);
                 forwarder = DataflowUtils.getTupleForwarder(configuration,
                         FeedUtils.getFeedLogManager(ctx,
@@ -118,7 +120,8 @@
     }
 
     @Override
-    public void configure(ILibraryManager context, Map<String, String> configuration) {
+    public void configure(IServiceContext serviceContext, Map<String, String> configuration) {
+        this.serviceContext = serviceContext;
         this.configuration = configuration;
     }
 
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
index 7a04ce8..2f30ff1 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
@@ -19,19 +19,19 @@
 
 package org.apache.asterix.metadata;
 
-import java.io.IOException;
 import java.rmi.RemoteException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.DatasetConfig.IndexType;
-import org.apache.asterix.common.config.IPropertiesProvider;
 import org.apache.asterix.common.dataflow.LSMIndexUtil;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.functions.FunctionSignature;
@@ -101,6 +101,7 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
@@ -112,9 +113,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
@@ -123,7 +121,7 @@
 
 public class MetadataNode implements IMetadataNode {
     private static final long serialVersionUID = 1L;
-
+    private static final Logger LOGGER = Logger.getLogger(MetadataNode.class.getName());
     private static final DatasetId METADATA_DATASET_ID =
             new ImmutableDatasetId(MetadataPrimaryIndexes.PROPERTIES_METADATA.getDatasetId());
 
@@ -142,13 +140,12 @@
         super();
     }
 
-    public void initialize(IAppRuntimeContext runtimeContext,
+    public void initialize(INcApplicationContext runtimeContext,
             MetadataTupleTranslatorProvider tupleTranslatorProvider, List<IMetadataExtension> metadataExtensions) {
         this.tupleTranslatorProvider = tupleTranslatorProvider;
         this.transactionSubsystem = runtimeContext.getTransactionSubsystem();
         this.datasetLifecycleManager = runtimeContext.getDatasetLifecycleManager();
-        this.metadataStoragePartition = ((IPropertiesProvider) runtimeContext).getMetadataProperties()
-                .getMetadataPartition().getPartitionId();
+        this.metadataStoragePartition = runtimeContext.getMetadataProperties().getMetadataPartition().getPartitionId();
         if (metadataExtensions != null) {
             extensionDatasets = new HashMap<>();
             for (IMetadataExtension metadataExtension : metadataExtensions) {
@@ -174,11 +171,11 @@
     @Override
     public void abortTransaction(JobId jobId) throws RemoteException, ACIDException {
         try {
-            ITransactionContext txnCtx = transactionSubsystem.getTransactionManager().getTransactionContext(jobId,
-                    false);
+            ITransactionContext txnCtx =
+                    transactionSubsystem.getTransactionManager().getTransactionContext(jobId, false);
             transactionSubsystem.getTransactionManager().abortTransaction(txnCtx, DatasetId.NULL, -1);
         } catch (ACIDException e) {
-            e.printStackTrace();
+            LOGGER.log(Level.WARNING, "Exception aborting transaction", e);
             throw e;
         }
     }
@@ -210,9 +207,7 @@
         try {
             ITupleReference tuple = tupleTranslator.getTupleFromMetadataEntity(entity);
             insertTupleIntoIndex(jobId, index, tuple);
-        } catch (TreeIndexDuplicateKeyException e) {
-            throw new MetadataException(entity.toString() + " already exists.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException | ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -231,9 +226,7 @@
         try {
             ITupleReference tuple = tupleTranslator.getTupleFromMetadataEntity(entity);
             deleteTupleFromIndex(jobId, index, tuple);
-        } catch (TreeIndexDuplicateKeyException e) {
-            throw new MetadataException(entity.toString() + " already exists.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException | ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -247,15 +240,17 @@
      * @param index
      * @return
      * @throws MetadataException
+     * @throws RemoteException
      */
     private <T> List<T> getEntities(JobId jobId, ITupleReference searchKey,
-            IMetadataEntityTupleTranslator<T> tupleTranslator, IMetadataIndex index) throws MetadataException {
+            IMetadataEntityTupleTranslator<T> tupleTranslator, IMetadataIndex index)
+            throws MetadataException, RemoteException {
         try {
             IValueExtractor<T> valueExtractor = new MetadataEntityValueExtractor<>(tupleTranslator);
             List<T> results = new ArrayList<>();
             searchIndex(jobId, index, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -288,8 +283,8 @@
     @Override
     public <T extends IExtensionMetadataEntity> List<T> getEntities(JobId jobId, IExtensionMetadataSearchKey searchKey)
             throws MetadataException, RemoteException {
-        ExtensionMetadataDataset<T> index = (ExtensionMetadataDataset<T>) extensionDatasets
-                .get(searchKey.getDatasetId());
+        ExtensionMetadataDataset<T> index =
+                (ExtensionMetadataDataset<T>) extensionDatasets.get(searchKey.getDatasetId());
         if (index == null) {
             throw new MetadataException("Metadata Extension Index: " + searchKey.getDatasetId() + " was not found");
         }
@@ -303,10 +298,14 @@
             DataverseTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataverseTupleTranslator(true);
             ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(dataverse);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, tuple);
-        } catch (TreeIndexDuplicateKeyException e) {
-            throw new MetadataException(
-                    "A dataverse with this name " + dataverse.getDataverseName() + " already exists.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException(
+                        "A dataverse with name '" + dataverse.getDataverseName() + "' already exists.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -328,10 +327,14 @@
 
                 addIndex(jobId, primaryIndex);
             }
-        } catch (TreeIndexDuplicateKeyException e) {
-            throw new MetadataException("A dataset with this name " + dataset.getDatasetName()
-                    + " already exists in dataverse '" + dataset.getDataverseName() + "'.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException("A dataset with this name " + dataset.getDatasetName()
+                        + " already exists in dataverse '" + dataset.getDataverseName() + "'.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -342,9 +345,13 @@
             IndexTupleTranslator tupleWriter = tupleTranslatorProvider.getIndexTupleTranslator(jobId, this, true);
             ITupleReference tuple = tupleWriter.getTupleFromMetadataEntity(index);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.INDEX_DATASET, tuple);
-        } catch (TreeIndexDuplicateKeyException e) {
-            throw new MetadataException("An index with name '" + index.getIndexName() + "' already exists.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException("An index with name '" + index.getIndexName() + "' already exists.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -355,9 +362,13 @@
             NodeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getNodeTupleTranslator(true);
             ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(node);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.NODE_DATASET, tuple);
-        } catch (TreeIndexDuplicateKeyException e) {
-            throw new MetadataException("A node with name '" + node.getNodeName() + "' already exists.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException("A node with name '" + node.getNodeName() + "' already exists.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -368,10 +379,14 @@
             NodeGroupTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getNodeGroupTupleTranslator(true);
             ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(nodeGroup);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.NODEGROUP_DATASET, tuple);
-        } catch (TreeIndexDuplicateKeyException e) {
-            throw new MetadataException("A nodegroup with name '" + nodeGroup.getNodeGroupName() + "' already exists.",
-                    e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException(
+                        "A nodegroup with name '" + nodeGroup.getNodeGroupName() + "' already exists.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -379,13 +394,18 @@
     @Override
     public void addDatatype(JobId jobId, Datatype datatype) throws MetadataException, RemoteException {
         try {
-            DatatypeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this,
-                    true);
+            DatatypeTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this, true);
             ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(datatype);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, tuple);
-        } catch (TreeIndexDuplicateKeyException e) {
-            throw new MetadataException("A datatype with name '" + datatype.getDatatypeName() + "' already exists.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException("A datatype with name '" + datatype.getDatatypeName() + "' already exists.",
+                        e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -395,19 +415,25 @@
         try {
             // Insert into the 'function' dataset.
             FunctionTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFunctionTupleTranslator(true);
             ITupleReference functionTuple = tupleReaderWriter.getTupleFromMetadataEntity(function);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET, functionTuple);
 
-        } catch (TreeIndexDuplicateKeyException e) {
-            throw new MetadataException("A function with this name " + function.getName() + " and arity "
-                    + function.getArity() + " already exists in dataverse '" + function.getDataverseName() + "'.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException("A function with this name " + function.getName() + " and arity "
+                        + function.getArity() + " already exists in dataverse '" + function.getDataverseName() + "'.",
+                        e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
 
     private void insertTupleIntoIndex(JobId jobId, IMetadataIndex metadataIndex, ITupleReference tuple)
-            throws ACIDException, HyracksDataException, IndexException {
+            throws ACIDException, HyracksDataException {
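+        // Opens the metadata index, registers a logging modification callback with the
+        // current transaction, then performs the insert through an LSM index accessor.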
         long resourceID = metadataIndex.getResourceId();
         String resourceName = metadataIndex.getFile().getRelativePath();
         ILSMIndex lsmIndex = (ILSMIndex) datasetLifecycleManager.get(resourceName);
@@ -415,13 +441,13 @@
             datasetLifecycleManager.open(resourceName);
 
             // prepare a Callback for logging
-            IModificationOperationCallback modCallback = createIndexModificationCallback(jobId, resourceID,
-                    metadataIndex, lsmIndex, Operation.INSERT);
+            IModificationOperationCallback modCallback =
+                    createIndexModificationCallback(jobId, resourceID, metadataIndex, lsmIndex, Operation.INSERT);
 
             ILSMIndexAccessor indexAccessor = lsmIndex.createAccessor(modCallback, NoOpOperationCallback.INSTANCE);
 
-            ITransactionContext txnCtx = transactionSubsystem.getTransactionManager().getTransactionContext(jobId,
-                    false);
+            ITransactionContext txnCtx =
+                    transactionSubsystem.getTransactionManager().getTransactionContext(jobId, false);
             txnCtx.setWriteTxn(true);
             txnCtx.registerIndexAndCallback(resourceID, lsmIndex, (AbstractOperationCallback) modCallback,
                     metadataIndex.isPrimaryIndex());
@@ -520,9 +546,15 @@
 
-            // TODO: Change this to be a BTree specific exception, e.g.,
-            // BTreeKeyDoesNotExistException.
-        } catch (TreeIndexException e) {
-            throw new MetadataException("Cannot drop dataverse '" + dataverseName + "' because it doesn't exist.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
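+            // UPDATE_OR_DELETE_NON_EXISTENT_KEY from the HYRACKS component means the
+            // delete found no matching tuple, i.e. the dataverse doesn't exist.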
+            if (e.getComponent().equals(ErrorCode.HYRACKS)
+                    && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                throw new MetadataException("Cannot drop dataverse '" + dataverseName + "' because it doesn't exist.",
+                        e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -530,12 +562,7 @@
     @Override
     public void dropDataset(JobId jobId, String dataverseName, String datasetName)
             throws MetadataException, RemoteException {
-        Dataset dataset;
-        try {
-            dataset = getDataset(jobId, dataverseName, datasetName);
-        } catch (Exception e) {
-            throw new MetadataException(e);
-        }
+        Dataset dataset = getDataset(jobId, dataverseName, datasetName);
         if (dataset == null) {
             throw new MetadataException("Cannot drop dataset '" + datasetName + "' because it doesn't exist.");
         }
@@ -568,14 +595,17 @@
                         }
                     }
                 }
-            } catch (TreeIndexException tie) {
+            } catch (HyracksDataException hde) {
-                // ignore this exception and continue deleting all relevant
-                // artifacts.
+                // Ignore only a "key doesn't exist" failure so deletion of the
+                // remaining artifacts can continue; rethrow anything else.
+                if (!hde.getComponent().equals(ErrorCode.HYRACKS)
+                        || hde.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                    throw new MetadataException(hde);
+                }
             } finally {
                 deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
             }
-
-        } catch (Exception e) {
+        } catch (HyracksDataException | ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -591,10 +621,15 @@
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.INDEX_DATASET, tuple);
-            // TODO: Change this to be a BTree specific exception, e.g.,
-            // BTreeKeyDoesNotExistException.
-        } catch (TreeIndexException e) {
-            throw new MetadataException(
-                    "Cannot drop index '" + datasetName + "." + indexName + "' because it doesn't exist.", e);
-        } catch (Exception e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS)
+                    && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                throw new MetadataException(
+                        "Cannot drop index '" + datasetName + "." + indexName + "' because it doesn't exist.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -602,11 +637,7 @@
     @Override
     public void dropNodegroup(JobId jobId, String nodeGroupName) throws MetadataException, RemoteException {
-        List<String> datasetNames;
-        try {
-            datasetNames = getDatasetNamesPartitionedOnThisNodeGroup(jobId, nodeGroupName);
-        } catch (Exception e) {
-            throw new MetadataException(e);
-        }
+        List<String> datasetNames = getDatasetNamesPartitionedOnThisNodeGroup(jobId, nodeGroupName);
         if (!datasetNames.isEmpty()) {
             StringBuilder sb = new StringBuilder();
             sb.append("Nodegroup '" + nodeGroupName
@@ -624,9 +655,15 @@
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.NODEGROUP_DATASET, tuple);
-            // TODO: Change this to be a BTree specific exception, e.g.,
-            // BTreeKeyDoesNotExistException.
-        } catch (TreeIndexException e) {
-            throw new MetadataException("Cannot drop nodegroup '" + nodeGroupName + "' because it doesn't exist", e);
-        } catch (Exception e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS)
+                    && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                throw new MetadataException("Cannot drop nodegroup '" + nodeGroupName + "' because it doesn't exist",
+                        e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -655,14 +692,20 @@
 
-            // TODO: Change this to be a BTree specific exception, e.g.,
-            // BTreeKeyDoesNotExistException.
-        } catch (TreeIndexException e) {
-            throw new MetadataException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
-        } catch (Exception e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS)
+                    && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                throw new MetadataException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
 
-    private void forceDropDatatype(JobId jobId, String dataverseName, String datatypeName) throws MetadataException {
+    private void forceDropDatatype(JobId jobId, String dataverseName, String datatypeName)
+            throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataverseName, datatypeName);
             // Searches the index for the tuple to be deleted. Acquires an S
@@ -671,27 +714,32 @@
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, tuple);
-            // TODO: Change this to be a BTree specific exception, e.g.,
-            // BTreeKeyDoesNotExistException.
-        } catch (TreeIndexException e) {
-            throw new MetadataException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS)
+                    && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                throw new MetadataException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
 
     private void deleteTupleFromIndex(JobId jobId, IMetadataIndex metadataIndex, ITupleReference tuple)
-            throws ACIDException, HyracksDataException, IndexException {
+            throws ACIDException, HyracksDataException {
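+        // Mirrors insertTupleIntoIndex: opens the index, registers the transactional
+        // callback, and removes the tuple through an LSM index accessor.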
         long resourceID = metadataIndex.getResourceId();
         String resourceName = metadataIndex.getFile().getRelativePath();
         ILSMIndex lsmIndex = (ILSMIndex) datasetLifecycleManager.get(resourceName);
         try {
             datasetLifecycleManager.open(resourceName);
             // prepare a Callback for logging
-            IModificationOperationCallback modCallback = createIndexModificationCallback(jobId, resourceID,
-                    metadataIndex, lsmIndex, Operation.DELETE);
+            IModificationOperationCallback modCallback =
+                    createIndexModificationCallback(jobId, resourceID, metadataIndex, lsmIndex, Operation.DELETE);
             ILSMIndexAccessor indexAccessor = lsmIndex.createAccessor(modCallback, NoOpOperationCallback.INSTANCE);
 
-            ITransactionContext txnCtx = transactionSubsystem.getTransactionManager().getTransactionContext(jobId,
-                    false);
+            ITransactionContext txnCtx =
+                    transactionSubsystem.getTransactionManager().getTransactionContext(jobId, false);
             txnCtx.setWriteTxn(true);
             txnCtx.registerIndexAndCallback(resourceID, lsmIndex, (AbstractOperationCallback) modCallback,
                     metadataIndex.isPrimaryIndex());
@@ -712,7 +760,7 @@
             List<Dataverse> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, null, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -729,7 +777,7 @@
                 return null;
             }
             return results.get(0);
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -744,7 +792,7 @@
             List<Dataset> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -758,7 +806,7 @@
             List<Feed> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -773,21 +821,22 @@
             List<Library> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
 
-    private List<Datatype> getDataverseDatatypes(JobId jobId, String dataverseName) throws MetadataException {
+    private List<Datatype> getDataverseDatatypes(JobId jobId, String dataverseName)
+            throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataverseName);
-            DatatypeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this,
-                    false);
+            DatatypeTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this, false);
             IValueExtractor<Datatype> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<Datatype> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -805,12 +854,12 @@
                 return null;
             }
             return results.get(0);
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
 
-    public List<Dataset> getAllDatasets(JobId jobId) throws MetadataException {
+    public List<Dataset> getAllDatasets(JobId jobId) throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = null;
             DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
@@ -818,26 +867,27 @@
             List<Dataset> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
 
-    public List<Datatype> getAllDatatypes(JobId jobId) throws MetadataException {
+    public List<Datatype> getAllDatatypes(JobId jobId) throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = null;
-            DatatypeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this,
-                    false);
+            DatatypeTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this, false);
             IValueExtractor<Datatype> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<Datatype> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
 
-    private void confirmDataverseCanBeDeleted(JobId jobId, String dataverseName) throws MetadataException {
+    private void confirmDataverseCanBeDeleted(JobId jobId, String dataverseName)
+            throws MetadataException, RemoteException {
         //If a dataset from a DIFFERENT dataverse
         //uses a type from this dataverse
         //throw an error
@@ -860,7 +910,7 @@
     }
 
     private void confirmDatatypeIsUnusedByDatasets(JobId jobId, String dataverseName, String datatypeName)
-            throws MetadataException {
+            throws MetadataException, RemoteException {
         //If any dataset uses this type, throw an error
         List<Dataset> datasets = getAllDatasets(jobId);
         for (Dataset set : datasets) {
@@ -920,7 +970,7 @@
     }
 
     public List<String> getDatasetNamesPartitionedOnThisNodeGroup(JobId jobId, String nodegroup)
-            throws MetadataException {
+            throws MetadataException, RemoteException {
         //this needs to scan the datasets and return the datasets that use this nodegroup
         List<String> nodeGroupDatasets = new ArrayList<>();
         List<Dataset> datasets = getAllDatasets(jobId);
@@ -938,8 +988,8 @@
             throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataverseName, datasetName, indexName);
-            IndexTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getIndexTupleTranslator(jobId, this,
-                    false);
+            IndexTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getIndexTupleTranslator(jobId, this, false);
             IValueExtractor<Index> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<Index> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.INDEX_DATASET, searchKey, valueExtractor, results);
@@ -947,7 +997,7 @@
                 return null;
             }
             return results.get(0);
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -957,13 +1007,13 @@
             throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataverseName, datasetName);
-            IndexTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getIndexTupleTranslator(jobId, this,
-                    false);
+            IndexTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getIndexTupleTranslator(jobId, this, false);
             IValueExtractor<Index> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<Index> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.INDEX_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -973,8 +1023,8 @@
             throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataverseName, datatypeName);
-            DatatypeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this,
-                    false);
+            DatatypeTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this, false);
             IValueExtractor<Datatype> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<Datatype> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, searchKey, valueExtractor, results);
@@ -982,7 +1032,7 @@
                 return null;
             }
             return results.get(0);
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -999,7 +1049,7 @@
                 return null;
             }
             return results.get(0);
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -1018,7 +1068,7 @@
                 return null;
             }
             return results.get(0);
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -1039,29 +1089,34 @@
                     "" + functionSignature.getArity());
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'function' dataset.
-            ITupleReference functionTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET,
-                    searchKey);
+            ITupleReference functionTuple =
+                    getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET, searchKey);
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET, functionTuple);
 
-            // TODO: Change this to be a BTree specific exception, e.g.,
-            // BTreeKeyDoesNotExistException.
-        } catch (TreeIndexException e) {
-            throw new MetadataException("There is no function with the name " + functionSignature.getName()
-                    + " and arity " + functionSignature.getArity(), e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS)
+                    && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                throw new MetadataException("There is no function with the name " + functionSignature.getName()
+                        + " and arity " + functionSignature.getArity(), e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
 
     private ITupleReference getTupleToBeDeleted(JobId jobId, IMetadataIndex metadataIndex, ITupleReference searchKey)
-            throws MetadataException, IndexException, IOException {
+            throws MetadataException, HyracksDataException, RemoteException {
         IValueExtractor<ITupleReference> valueExtractor = new TupleCopyValueExtractor(metadataIndex.getTypeTraits());
         List<ITupleReference> results = new ArrayList<>();
         searchIndex(jobId, metadataIndex, searchKey, valueExtractor, results);
         if (results.isEmpty()) {
-            // TODO: Temporarily a TreeIndexException to make it get caught by
-            // caller in the appropriate catch block.
-            throw new TreeIndexException("Could not find entry to be deleted.");
+            // Signal the missing key with a Hyracks UPDATE_OR_DELETE_NON_EXISTENT_KEY
+            // error so callers can map it to an entity-specific "doesn't exist" message.
+            throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
         }
         // There should be exactly one result returned from the search.
         return results.get(0);
@@ -1076,8 +1131,8 @@
             String resourceName = index.getFile().toString();
             IIndex indexInstance = datasetLifecycleManager.get(resourceName);
             datasetLifecycleManager.open(resourceName);
-            IIndexAccessor indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
+            IIndexAccessor indexAccessor =
+                    indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
             ITreeIndexCursor rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
 
             RangePredicate rangePred = null;
@@ -1086,9 +1141,8 @@
             try {
                 while (rangeCursor.hasNext()) {
                     rangeCursor.next();
-                    sb.append(TupleUtils.printTuple(rangeCursor.getTuple(),
-                            new ISerializerDeserializer[] { SerializerDeserializerProvider.INSTANCE
-                                    .getSerializerDeserializer(BuiltinType.ASTRING) }));
+                    sb.append(TupleUtils.printTuple(rangeCursor.getTuple(), new ISerializerDeserializer[] {
+                            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING) }));
                 }
             } finally {
                 rangeCursor.close();
@@ -1098,8 +1152,8 @@
             index = MetadataPrimaryIndexes.DATASET_DATASET;
             indexInstance = datasetLifecycleManager.get(resourceName);
             datasetLifecycleManager.open(resourceName);
-            indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
+            indexAccessor =
+                    indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
             rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
 
             rangePred = null;
@@ -1108,12 +1162,9 @@
             try {
                 while (rangeCursor.hasNext()) {
                     rangeCursor.next();
-                    sb.append(TupleUtils.printTuple(rangeCursor.getTuple(),
-                            new ISerializerDeserializer[] {
-                                    SerializerDeserializerProvider.INSTANCE
-                                            .getSerializerDeserializer(BuiltinType.ASTRING),
-                                    SerializerDeserializerProvider.INSTANCE
-                                            .getSerializerDeserializer(BuiltinType.ASTRING) }));
+                    sb.append(TupleUtils.printTuple(rangeCursor.getTuple(), new ISerializerDeserializer[] {
+                            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING),
+                            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING) }));
                 }
             } finally {
                 rangeCursor.close();
@@ -1123,8 +1174,8 @@
             index = MetadataPrimaryIndexes.INDEX_DATASET;
             indexInstance = datasetLifecycleManager.get(resourceName);
             datasetLifecycleManager.open(resourceName);
-            indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
+            indexAccessor =
+                    indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
             rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
 
             rangePred = null;
@@ -1136,14 +1187,14 @@
                     sb.append(TupleUtils.printTuple(rangeCursor.getTuple(), new ISerializerDeserializer[] {
                             SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING),
                             SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING),
-                            SerializerDeserializerProvider.INSTANCE
-                                    .getSerializerDeserializer(BuiltinType.ASTRING) }));
+                            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING) }));
                 }
             } finally {
                 rangeCursor.close();
             }
             datasetLifecycleManager.close(resourceName);
         } catch (Exception e) {
+            // Debugging method: dump the stack trace and return whatever was collected.
             e.printStackTrace();
         }
         return sb.toString();
@@ -1151,7 +1202,7 @@
 
     private <ResultType> void searchIndex(JobId jobId, IMetadataIndex index, ITupleReference searchKey,
             IValueExtractor<ResultType> valueExtractor, List<ResultType> results)
-            throws MetadataException, IndexException, IOException {
+            throws MetadataException, HyracksDataException, RemoteException {
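+        // Scans the metadata index with an optional (nullable) prefix search key,
+        // materializing each matching tuple into the results list via the extractor.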
         IBinaryComparatorFactory[] comparatorFactories = index.getKeyBinaryComparatorFactory();
         if (index.getFile() == null) {
             throw new MetadataException("No file for Index " + index.getDataverseName() + "." + index.getIndexName());
@@ -1159,8 +1210,8 @@
         String resourceName = index.getFile().getRelativePath();
         IIndex indexInstance = datasetLifecycleManager.get(resourceName);
         datasetLifecycleManager.open(resourceName);
-        IIndexAccessor indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         ITreeIndexCursor rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
 
         IBinaryComparator[] searchCmps = null;
@@ -1198,8 +1249,8 @@
             IIndex indexInstance = datasetLifecycleManager.get(resourceName);
             datasetLifecycleManager.open(resourceName);
             try {
-                IIndexAccessor indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE,
-                        NoOpOperationCallback.INSTANCE);
+                IIndexAccessor indexAccessor =
+                        indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
                 IIndexCursor rangeCursor = indexAccessor.createSearchCursor(false);
 
                 DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
@@ -1226,7 +1277,7 @@
                 datasetLifecycleManager.close(resourceName);
             }
 
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
 
@@ -1237,8 +1288,8 @@
     // Hyracks version.
     public static ITupleReference createTuple(String... fields) {
         @SuppressWarnings("unchecked")
-        ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
-                .getSerializerDeserializer(BuiltinType.ASTRING);
+        ISerializerDeserializer<AString> stringSerde =
+                SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
         AMutableString aString = new AMutableString("");
         ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
         for (String s : fields) {
@@ -1266,7 +1317,7 @@
             List<Function> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -1275,17 +1326,18 @@
     public void addAdapter(JobId jobId, DatasourceAdapter adapter) throws MetadataException, RemoteException {
         try {
             // Insert into the 'Adapter' dataset.
-            DatasourceAdapterTupleTranslator tupleReaderWriter = tupleTranslatorProvider
-                    .getAdapterTupleTranslator(true);
+            DatasourceAdapterTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getAdapterTupleTranslator(true);
             ITupleReference adapterTuple = tupleReaderWriter.getTupleFromMetadataEntity(adapter);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, adapterTuple);
-
-        } catch (TreeIndexDuplicateKeyException e) {
-            throw new MetadataException(
-                    "A adapter with this name " + adapter.getAdapterIdentifier().getName()
-                            + " already exists in dataverse '" + adapter.getAdapterIdentifier().getNamespace() + "'.",
-                    e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException("A adapter with this name " + adapter.getAdapterIdentifier().getName()
+                        + " already exists in dataverse '" + adapter.getAdapterIdentifier().getNamespace() + "'.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -1302,15 +1354,20 @@
             ITupleReference searchKey = createTuple(dataverseName, adapterName);
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'Adapter' dataset.
-            ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET,
-                    searchKey);
+            ITupleReference datasetTuple =
+                    getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, searchKey);
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, datasetTuple);
 
-            // TODO: Change this to be a BTree specific exception, e.g.,
-            // BTreeKeyDoesNotExistException.
-        } catch (TreeIndexException e) {
-            throw new MetadataException("Cannot drop adapter '" + adapterName, e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS)
+                    && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                throw new MetadataException("Cannot drop adapter '" + adapterName + " since it doesn't exist", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
 
@@ -1321,8 +1378,8 @@
             throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataverseName, adapterName);
-            DatasourceAdapterTupleTranslator tupleReaderWriter = tupleTranslatorProvider
-                    .getAdapterTupleTranslator(false);
+            DatasourceAdapterTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getAdapterTupleTranslator(false);
             List<DatasourceAdapter> results = new ArrayList<>();
             IValueExtractor<DatasourceAdapter> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             searchIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, searchKey, valueExtractor, results);
@@ -1330,7 +1387,7 @@
                 return null;
             }
             return results.get(0);
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -1340,15 +1397,18 @@
             throws MetadataException, RemoteException {
         try {
             // Insert into the 'CompactionPolicy' dataset.
-            CompactionPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider
-                    .getCompactionPolicyTupleTranslator(true);
+            CompactionPolicyTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getCompactionPolicyTupleTranslator(true);
             ITupleReference compactionPolicyTuple = tupleReaderWriter.getTupleFromMetadataEntity(compactionPolicy);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET, compactionPolicyTuple);
-
-        } catch (TreeIndexDuplicateKeyException e) {
-            throw new MetadataException("A compcation policy with this name " + compactionPolicy.getPolicyName()
-                    + " already exists in dataverse '" + compactionPolicy.getPolicyName() + "'.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException("A compcation policy with this name " + compactionPolicy.getPolicyName()
+                        + " already exists in dataverse '" + compactionPolicy.getPolicyName() + "'.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -1358,8 +1418,8 @@
             throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataverse, policyName);
-            CompactionPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider
-                    .getCompactionPolicyTupleTranslator(false);
+            CompactionPolicyTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getCompactionPolicyTupleTranslator(false);
             List<CompactionPolicy> results = new ArrayList<>();
             IValueExtractor<CompactionPolicy> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             searchIndex(jobId, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET, searchKey, valueExtractor, results);
@@ -1367,7 +1427,7 @@
                 return results.get(0);
             }
             return null;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -1377,13 +1437,13 @@
             throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataverseName);
-            DatasourceAdapterTupleTranslator tupleReaderWriter = tupleTranslatorProvider
-                    .getAdapterTupleTranslator(false);
+            DatasourceAdapterTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getAdapterTupleTranslator(false);
             IValueExtractor<DatasourceAdapter> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<DatasourceAdapter> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -1396,10 +1456,14 @@
             ITupleReference libraryTuple = tupleReaderWriter.getTupleFromMetadataEntity(library);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, libraryTuple);
 
-        } catch (TreeIndexException e) {
-            throw new MetadataException("A library with this name " + library.getDataverseName()
-                    + " already exists in dataverse '" + library.getDataverseName() + "'.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException("A library with this name " + library.getDataverseName()
+                        + " already exists in dataverse '" + library.getDataverseName() + "'.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -1416,15 +1480,20 @@
             ITupleReference searchKey = createTuple(dataverseName, libraryName);
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'Adapter' dataset.
-            ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET,
-                    searchKey);
+            ITupleReference datasetTuple =
+                    getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, searchKey);
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, datasetTuple);
 
-            // TODO: Change this to be a BTree specific exception, e.g.,
-            // BTreeKeyDoesNotExistException.
-        } catch (TreeIndexException e) {
-            throw new MetadataException("Cannot drop library '" + libraryName, e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS)
+                    && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                throw new MetadataException("Cannot drop library '" + libraryName, e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
 
@@ -1443,7 +1512,7 @@
                 return null;
             }
             return results.get(0);
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -1460,11 +1529,14 @@
             FeedPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedPolicyTupleTranslator(true);
             ITupleReference feedPolicyTuple = tupleReaderWriter.getTupleFromMetadataEntity(feedPolicy);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, feedPolicyTuple);
-
-        } catch (TreeIndexException e) {
-            throw new MetadataException("A feed policy with this name " + feedPolicy.getPolicyName()
-                    + " already exists in dataverse '" + feedPolicy.getPolicyName() + "'.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException("A feed policy with this name " + feedPolicy.getPolicyName()
+                        + " already exists in dataverse '" + feedPolicy.getPolicyName() + "'.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -1482,7 +1554,7 @@
                 return results.get(0);
             }
             return null;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -1493,14 +1565,14 @@
             FeedConnectionTupleTranslator tupleReaderWriter = new FeedConnectionTupleTranslator(true);
             ITupleReference feedConnTuple = tupleReaderWriter.getTupleFromMetadataEntity(feedConnection);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, feedConnTuple);
-        } catch (IndexException | ACIDException | IOException e) {
+        } catch (HyracksDataException | ACIDException e) {
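+            // Feed connections get no duplicate-key special case; every failure is
+            // wrapped in a generic MetadataException.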
             throw new MetadataException(e);
         }
     }
 
     @Override
     public List<FeedConnection> getFeedConnections(JobId jobId, String dataverseName, String feedName)
-            throws MetadataException {
+            throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataverseName, feedName);
             FeedConnectionTupleTranslator tupleReaderWriter = new FeedConnectionTupleTranslator(false);
@@ -1508,14 +1580,14 @@
             IValueExtractor<FeedConnection> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             searchIndex(jobId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
 
     @Override
     public FeedConnection getFeedConnection(JobId jobId, String dataverseName, String feedName, String datasetName)
-            throws MetadataException {
+            throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataverseName, feedName, datasetName);
             FeedConnectionTupleTranslator tupleReaderWriter = new FeedConnectionTupleTranslator(false);
@@ -1526,20 +1598,20 @@
                 return results.get(0);
             }
             return null;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
 
     @Override
     public void dropFeedConnection(JobId jobId, String dataverseName, String feedName, String datasetName)
-            throws MetadataException {
+            throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataverseName, feedName, datasetName);
-            ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET,
-                    searchKey);
+            ITupleReference tuple =
+                    getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, searchKey);
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, tuple);
-        } catch (IndexException | IOException | ACIDException e) {
+        } catch (HyracksDataException | ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -1551,11 +1623,14 @@
             FeedTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedTupleTranslator(true);
             ITupleReference feedTuple = tupleReaderWriter.getTupleFromMetadataEntity(feed);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, feedTuple);
-
-        } catch (TreeIndexException e) {
-            throw new MetadataException("A feed with this name " + feed.getFeedName() + " already exists in dataverse '"
-                    + feed.getDataverseName() + "'.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException("A feed with this name " + feed.getFeedName()
+                        + " already exists in dataverse '" + feed.getDataverseName() + "'.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -1572,7 +1647,7 @@
                 return results.get(0);
             }
             return null;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -1587,9 +1662,14 @@
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, tuple);
-            // TODO: Change this to be a BTree specific exception, e.g.,
-            // BTreeKeyDoesNotExistException.
-        } catch (TreeIndexException e) {
-            throw new MetadataException("Cannot drop feed '" + feedName + "' because it doesn't exist", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS)
+                    && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                throw new MetadataException("Cannot drop feed '" + feedName + "' because it doesn't exist", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -1601,9 +1681,14 @@
             ITupleReference searchKey = createTuple(dataverseName, policyName);
             ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey);
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, tuple);
-        } catch (TreeIndexException e) {
-            throw new MetadataException("Unknown feed policy " + policyName, e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS)
+                    && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                throw new MetadataException("Unknown feed policy " + policyName, e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -1618,7 +1703,7 @@
             List<FeedPolicyEntity> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -1627,15 +1712,19 @@
     public void addExternalFile(JobId jobId, ExternalFile externalFile) throws MetadataException, RemoteException {
         try {
             // Insert into the 'externalFiles' dataset.
-            ExternalFileTupleTranslator tupleReaderWriter = tupleTranslatorProvider
-                    .getExternalFileTupleTranslator(true);
+            ExternalFileTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getExternalFileTupleTranslator(true);
             ITupleReference externalFileTuple = tupleReaderWriter.getTupleFromMetadataEntity(externalFile);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, externalFileTuple);
-        } catch (TreeIndexDuplicateKeyException e) {
-            throw new MetadataException("An externalFile with this number " + externalFile.getFileNumber()
-                    + " already exists in dataset '" + externalFile.getDatasetName() + "' in dataverse '"
-                    + externalFile.getDataverseName() + "'.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+                throw new MetadataException("An externalFile with this number " + externalFile.getFileNumber()
+                        + " already exists in dataset '" + externalFile.getDatasetName() + "' in dataverse '"
+                        + externalFile.getDataverseName() + "'.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -1644,13 +1733,13 @@
     public List<ExternalFile> getExternalFiles(JobId jobId, Dataset dataset) throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataset.getDataverseName(), dataset.getDatasetName());
-            ExternalFileTupleTranslator tupleReaderWriter = tupleTranslatorProvider
-                    .getExternalFileTupleTranslator(false);
+            ExternalFileTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getExternalFileTupleTranslator(false);
             IValueExtractor<ExternalFile> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<ExternalFile> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -1663,12 +1752,17 @@
             ITupleReference searchKey = createExternalFileSearchTuple(dataverseName, datasetName, fileNumber);
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'ExternalFile' dataset.
-            ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET,
-                    searchKey);
+            ITupleReference datasetTuple =
+                    getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, searchKey);
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, datasetTuple);
-        } catch (TreeIndexException e) {
-            throw new MetadataException("Couldn't drop externalFile.", e);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException e) {
+            if (e.getComponent().equals(ErrorCode.HYRACKS)
+                    && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                throw new MetadataException("Couldn't drop externalFile.", e);
+            } else {
+                throw new MetadataException(e);
+            }
+        } catch (ACIDException e) {
             throw new MetadataException(e);
         }
     }
@@ -1687,10 +1781,10 @@
     @SuppressWarnings("unchecked")
     public ITupleReference createExternalFileSearchTuple(String dataverseName, String datasetName, int fileNumber)
             throws HyracksDataException {
-        ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
-                .getSerializerDeserializer(BuiltinType.ASTRING);
-        ISerializerDeserializer<AInt32> intSerde = SerializerDeserializerProvider.INSTANCE
-                .getSerializerDeserializer(BuiltinType.AINT32);
+        ISerializerDeserializer<AString> stringSerde =
+                SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
+        ISerializerDeserializer<AInt32> intSerde =
+                SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
 
         AMutableString aString = new AMutableString("");
         ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(3);
@@ -1719,8 +1813,8 @@
             throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createExternalFileSearchTuple(dataverseName, datasetName, fileNumber);
-            ExternalFileTupleTranslator tupleReaderWriter = tupleTranslatorProvider
-                    .getExternalFileTupleTranslator(false);
+            ExternalFileTupleTranslator tupleReaderWriter =
+                    tupleTranslatorProvider.getExternalFileTupleTranslator(false);
             IValueExtractor<ExternalFile> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<ExternalFile> results = new ArrayList<>();
             searchIndex(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, searchKey, valueExtractor, results);
@@ -1728,7 +1822,7 @@
                 return null;
             }
             return results.get(0);
-        } catch (IndexException | IOException e) {
+        } catch (HyracksDataException e) {
             throw new MetadataException(e);
         }
     }
@@ -1742,15 +1836,15 @@
             searchKey = createTuple(dataset.getDataverseName(), dataset.getDatasetName());
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'dataset' dataset.
-            ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASET_DATASET,
-                    searchKey);
+            ITupleReference datasetTuple =
+                    getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey);
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
             // Previous tuple was deleted
             // Insert into the 'dataset' dataset.
             DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(true);
             datasetTuple = tupleReaderWriter.getTupleFromMetadataEntity(dataset);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
-        } catch (ACIDException | IndexException | IOException e) {
+        } catch (HyracksDataException | ACIDException e) {
             throw new MetadataException(e);
         }
     }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IClusterManager.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IClusterManager.java
index 9ead1ee..4acd673 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IClusterManager.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IClusterManager.java
@@ -21,6 +21,7 @@
 import java.util.Set;
 
 import org.apache.asterix.common.api.IClusterEventsSubscriber;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.event.schema.cluster.Node;
 
@@ -30,7 +31,8 @@
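+     * @param appCtx the CC application context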
      * @param node
      * @throws AsterixException
      */
-    public void addNode(Node node) throws AsterixException;
+    public void addNode(ICcApplicationContext appCtx, Node node) throws AsterixException;
 
     /**
      * @param node
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslator.java
index d8ccbde..9a9f18d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslator.java
@@ -21,8 +21,10 @@
 
 import java.io.IOException;
 import java.io.Serializable;
+import java.rmi.RemoteException;
 
 import org.apache.asterix.metadata.MetadataException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 /**
@@ -45,7 +47,9 @@
      * @throws MetadataException
-     * @throws IOException
+     * @throws HyracksDataException
+     * @throws RemoteException
      */
-    public T getMetadataEntityFromTuple(ITupleReference tuple) throws MetadataException, IOException;
+    public T getMetadataEntityFromTuple(ITupleReference tuple)
+            throws MetadataException, HyracksDataException, RemoteException;
 
     /**
      * Serializes the given metadata entity of type T into an appropriate tuple
@@ -53,7 +56,7 @@
      *
      * @param metadataEntity
      *            Metadata entity to be written into a tuple.
-     * @throws IOException
+     * @throws HyracksDataException
      */
-    public ITupleReference getTupleFromMetadataEntity(T metadataEntity) throws MetadataException, IOException;
+    public ITupleReference getTupleFromMetadataEntity(T metadataEntity) throws MetadataException, HyracksDataException;
 }
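
Narrowing the throws clauses from IOException to HyracksDataException (plus RemoteException on the read path, since a translator such as DatatypeTupleTranslator may call back into the metadata node over RMI) lets call sites drop their IOException handling. A minimal caller sketch, with the translator and entity variables assumed in scope:

    // Sketch: serialization failures now surface uniformly as HyracksDataException.
    try {
        ITupleReference tuple = translator.getTupleFromMetadataEntity(entity);
        // ... hand the tuple to the metadata index ...
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
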
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
index e90e29b..c8db613 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
@@ -20,6 +20,7 @@
 package org.apache.asterix.metadata.api;
 
 import java.io.IOException;
+import java.rmi.RemoteException;
 
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.metadata.MetadataException;
@@ -46,5 +47,6 @@
      * @throws HyracksDataException
-     * @throws IOException
+     * @throws RemoteException
      */
-    public T getValue(JobId jobId, ITupleReference tuple) throws MetadataException, IOException;
+    public T getValue(JobId jobId, ITupleReference tuple)
+            throws MetadataException, HyracksDataException, RemoteException;
 }
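
Extractors in the MetadataEntityValueExtractor style simply delegate to a tuple translator, so the widened clause mirrors the translator interface above. A minimal sketch, assuming a translator field of the matching type:

    // Sketch of an IValueExtractor<T> that defers to a tuple translator.
    @Override
    public T getValue(JobId jobId, ITupleReference tuple)
            throws MetadataException, HyracksDataException, RemoteException {
        return translator.getMetadataEntityFromTuple(tuple);
    }
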
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
index 15a2783..3170a68 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -26,7 +26,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.asterix.common.cluster.ClusterPartition;
 import org.apache.asterix.common.config.ClusterProperties;
@@ -106,7 +106,7 @@
 public class MetadataBootstrap {
     public static final boolean IS_DEBUG_MODE = false;
     private static final Logger LOGGER = Logger.getLogger(MetadataBootstrap.class.getName());
-    private static IAppRuntimeContext appContext;
+    private static INcApplicationContext appContext;
     private static IBufferCache bufferCache;
     private static IFileMapProvider fileMapProvider;
     private static IDatasetLifecycleManager dataLifecycleManager;
@@ -142,7 +142,7 @@
     public static void startUniverse(INCServiceContext ncServiceContext, boolean isNewUniverse)
             throws RemoteException, ACIDException, MetadataException {
         MetadataBootstrap.setNewUniverse(isNewUniverse);
-        appContext = (IAppRuntimeContext) ncServiceContext.getApplicationContext();
+        appContext = (INcApplicationContext) ncServiceContext.getApplicationContext();
 
         MetadataProperties metadataProperties = appContext.getMetadataProperties();
         metadataNodeName = metadataProperties.getMetadataNodeName();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManager.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManager.java
index 5bfe876..1b4f5dc 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManager.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManager.java
@@ -31,8 +31,9 @@
 
 import org.apache.asterix.common.api.IClusterEventsSubscriber;
 import org.apache.asterix.common.api.IClusterManagementWork;
-import org.apache.asterix.common.config.ExternalProperties;
 import org.apache.asterix.common.config.ClusterProperties;
+import org.apache.asterix.common.config.ExternalProperties;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.event.management.AsterixEventServiceClient;
 import org.apache.asterix.event.model.AsterixInstance;
@@ -47,7 +48,6 @@
 import org.apache.asterix.event.util.PatternCreator;
 import org.apache.asterix.installer.schema.conf.Configuration;
 import org.apache.asterix.metadata.api.IClusterManager;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 
 public class ClusterManager implements IClusterManager {
 
@@ -91,17 +91,17 @@
     }
 
     @Override
-    public void addNode(Node node) throws AsterixException {
+    public void addNode(ICcApplicationContext appCtx, Node node) throws AsterixException {
         try {
             Cluster cluster = ClusterProperties.INSTANCE.getCluster();
-            List<Pattern> pattern = new ArrayList<Pattern>();
-            String asterixInstanceName = AppContextInfo.INSTANCE.getMetadataProperties().getInstanceName();
+            List<Pattern> pattern = new ArrayList<>();
+            String asterixInstanceName = appCtx.getMetadataProperties().getInstanceName();
             Patterns prepareNode = PatternCreator.INSTANCE.createPrepareNodePattern(asterixInstanceName,
                     ClusterProperties.INSTANCE.getCluster(), node);
             cluster.getNode().add(node);
             client.submit(prepareNode);
 
-            ExternalProperties externalProps = AppContextInfo.INSTANCE.getExternalProperties();
+            ExternalProperties externalProps = appCtx.getExternalProperties();
             AsterixEventServiceUtil.poulateClusterEnvironmentProperties(cluster, externalProps.getCCJavaParams(),
                     externalProps.getNCJavaParams());
 
@@ -110,8 +110,8 @@
             String hostId = node.getId();
             String nodeControllerId = asterixInstanceName + "_" + node.getId();
             String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
-            Pattern startNC = PatternCreator.INSTANCE.createNCStartPattern(ccHost, hostId, nodeControllerId, iodevices,
-                    false);
+            Pattern startNC =
+                    PatternCreator.INSTANCE.createNCStartPattern(ccHost, hostId, nodeControllerId, iodevices, false);
             pattern.add(startNC);
             Patterns startNCPattern = new Patterns(pattern);
             client.submit(startNCPattern);
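
Call sites now supply the CC application context instead of letting ClusterManager reach into the AppContextInfo singleton. A call-site sketch; appCtx and node are assumed in scope, and the getClusterManager() accessor on the provider is an assumption:

    // Sketch: the context travels with the call rather than being a global.
    IClusterManager clusterManager = ClusterManagerProvider.getClusterManager();
    clusterManager.addNode(appCtx, node);
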
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManagerProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManagerProvider.java
index cbb3229..d11e70d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManagerProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManagerProvider.java
@@ -23,6 +23,7 @@
 
 import org.apache.asterix.common.api.IClusterEventsSubscriber;
 import org.apache.asterix.common.config.ClusterProperties;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.event.schema.cluster.Cluster;
 import org.apache.asterix.event.schema.cluster.Node;
@@ -57,7 +58,7 @@
     }
     private static class NoopClusterManager implements IClusterManager {
         @Override
-        public void addNode(Node node) throws AsterixException {
+        public void addNode(ICcApplicationContext appCtx, Node node) throws AsterixException {
             // no-op
         }
 
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/DatasetHints.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/DatasetHints.java
index 90c8b1e..277587f 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/DatasetHints.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/DatasetHints.java
@@ -21,7 +21,7 @@
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 
 /**
@@ -42,19 +42,19 @@
      *         first element as a boolean that represents the validation result.
      *         second element as the error message if the validation result is false
      */
-    public static Pair<Boolean, String> validate(String hintName, String value) {
+    public static Pair<Boolean, String> validate(ICcApplicationContext appCtx, String hintName, String value) {
         for (IHint h : hints) {
             if (h.getName().equalsIgnoreCase(hintName.trim())) {
-                return h.validateValue(value);
+                return h.validateValue(appCtx, value);
             }
         }
-        return new Pair<Boolean, String>(false, "Unknown hint :" + hintName);
+        return new Pair<>(false, "Unknown hint :" + hintName);
     }
 
     private static Set<IHint> hints = initHints();
 
     private static Set<IHint> initHints() {
-        Set<IHint> hints = new HashSet<IHint>();
+        Set<IHint> hints = new HashSet<>();
         hints.add(new DatasetCardinalityHint());
         hints.add(new DatasetNodegroupCardinalityHint());
         return hints;
@@ -74,19 +74,19 @@
         }
 
         @Override
-        public Pair<Boolean, String> validateValue(String value) {
+        public Pair<Boolean, String> validateValue(ICcApplicationContext appCtx, String value) {
             boolean valid = true;
             long longValue;
             try {
                 longValue = Long.parseLong(value);
                 if (longValue < 0) {
-                    return new Pair<Boolean, String>(false, "Value must be >= 0");
+                    return new Pair<>(false, "Value must be >= 0");
                 }
             } catch (NumberFormatException nfe) {
                 valid = false;
-                return new Pair<Boolean, String>(valid, "Inappropriate value");
+                return new Pair<>(valid, "Inappropriate value");
             }
-            return new Pair<Boolean, String>(true, null);
+            return new Pair<>(true, null);
         }
 
     }
@@ -105,26 +105,25 @@
         }
 
         @Override
-        public Pair<Boolean, String> validateValue(String value) {
+        public Pair<Boolean, String> validateValue(ICcApplicationContext appCtx, String value) {
             boolean valid = true;
             int intValue;
             try {
                 intValue = Integer.parseInt(value);
                 if (intValue < 0) {
-                    return new Pair<Boolean, String>(false, "Value must be >= 0");
+                    return new Pair<>(false, "Value must be >= 0");
                 }
-                int numNodesInCluster = AppContextInfo.INSTANCE.getMetadataProperties().getNodeNames()
-                        .size();
+                int numNodesInCluster = appCtx.getMetadataProperties().getNodeNames().size();
                 if (numNodesInCluster < intValue) {
-                    return new Pair<Boolean, String>(false,
+                    return new Pair<>(false,
                             "Value must be greater or equal to the existing number of nodes in cluster ("
                                     + numNodesInCluster + ")");
                 }
             } catch (NumberFormatException nfe) {
                 valid = false;
-                return new Pair<Boolean, String>(valid, "Inappropriate value");
+                return new Pair<>(valid, "Inappropriate value");
             }
-            return new Pair<Boolean, String>(true, null);
+            return new Pair<>(true, null);
         }
 
     }
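
Because the node-count check above needs cluster metadata, hint validation now takes the context at DDL time. A usage sketch; appCtx, hintName, and hintValue are assumed in scope:

    // Sketch: validate a dataset hint and surface the message on failure.
    Pair<Boolean, String> result = DatasetHints.validate(appCtx, hintName, hintValue);
    if (!result.first) {
        throw new AsterixException("Invalid hint: " + result.second);
    }
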
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/IHint.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/IHint.java
index fefc066..97422f5 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/IHint.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/IHint.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.metadata.dataset.hints;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 
 /**
@@ -41,6 +42,6 @@
      *         first element as a boolean that represents the validation result.
      *         second element as the error message if the validation result is false
      */
-    public Pair<Boolean, String> validateValue(String value);
+    public Pair<Boolean, String> validateValue(ICcApplicationContext appCtx, String value);
 
 }
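
A minimal IHint implementation against the revised contract; the hint name and the non-blank rule are hypothetical, shown only to illustrate the new (appCtx, value) signature:

    // Hypothetical hint that accepts any non-blank value.
    public class ExampleHint implements IHint {
        @Override
        public String getName() {
            return "example";
        }

        @Override
        public Pair<Boolean, String> validateValue(ICcApplicationContext appCtx, String value) {
            if (value == null || value.trim().isEmpty()) {
                return new Pair<>(false, "Value must not be blank");
            }
            return new Pair<>(true, null);
        }
    }

A hint registered in initHints() would then receive the application context at validation time, like DatasetNodegroupCardinalityHint above.
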
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
index aa76122..d111eb2 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
@@ -31,7 +31,7 @@
 import org.apache.asterix.common.config.GlobalConfig;
 import org.apache.asterix.common.config.StorageProperties;
 import org.apache.asterix.common.context.IStorageComponentProvider;
-import org.apache.asterix.common.dataflow.IApplicationContextInfo;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.dataflow.LSMInvertedIndexInsertDeleteOperatorDescriptor;
 import org.apache.asterix.common.dataflow.LSMTreeInsertDeleteOperatorDescriptor;
 import org.apache.asterix.common.exceptions.AsterixException;
@@ -86,7 +86,6 @@
 import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
 import org.apache.asterix.runtime.operators.LSMInvertedIndexUpsertOperatorDescriptor;
 import org.apache.asterix.runtime.operators.LSMTreeUpsertOperatorDescriptor;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.asterix.runtime.utils.RuntimeComponentsProvider;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
@@ -146,6 +145,7 @@
 
 public class MetadataProvider implements IMetadataProvider<DataSourceId, String> {
 
+    private final ICcApplicationContext appCtx;
     private final IStorageComponentProvider storaegComponentProvider;
     private final IMetadataPageManagerFactory metadataPageManagerFactory;
     private final IPrimitiveValueProviderFactory primitiveValueProviderFactory;
@@ -167,11 +167,13 @@
     private boolean isTemporaryDatasetWriteJob = true;
     private boolean blockingOperatorDisabled = false;
 
-    public MetadataProvider(Dataverse defaultDataverse, IStorageComponentProvider componentProvider) {
+    public MetadataProvider(ICcApplicationContext appCtx, Dataverse defaultDataverse,
+            IStorageComponentProvider componentProvider) {
+        this.appCtx = appCtx;
         this.defaultDataverse = defaultDataverse;
         this.storaegComponentProvider = componentProvider;
-        storageProperties = AppContextInfo.INSTANCE.getStorageProperties();
-        libraryManager = AppContextInfo.INSTANCE.getLibraryManager();
+        storageProperties = appCtx.getStorageProperties();
+        libraryManager = appCtx.getLibraryManager();
         metadataPageManagerFactory = componentProvider.getMetadataPageManagerFactory();
         primitiveValueProviderFactory = componentProvider.getPrimitiveValueProviderFactory();
         locks = new LockList();
@@ -417,8 +419,8 @@
     public Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> buildFeedIntakeRuntime(
             JobSpecification jobSpec, Feed primaryFeed, FeedPolicyAccessor policyAccessor) throws Exception {
         Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> factoryOutput;
-        factoryOutput =
-                FeedMetadataUtil.getPrimaryFeedFactoryAndOutput(primaryFeed, policyAccessor, mdTxnCtx, libraryManager);
+        factoryOutput = FeedMetadataUtil.getPrimaryFeedFactoryAndOutput(primaryFeed, policyAccessor, mdTxnCtx,
+                getApplicationContext());
         ARecordType recordType = FeedMetadataUtil.getOutputType(primaryFeed, primaryFeed.getAdapterConfiguration(),
                 ExternalDataConstants.KEY_TYPE_NAME);
         IAdapterFactory adapterFactory = factoryOutput.first;
@@ -510,7 +512,7 @@
                         context.getBinaryComparatorFactoryProvider());
             }
 
-            IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+            ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
             Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc;
             spPc = getSplitProviderAndConstraints(dataset, theIndex.getIndexName());
             int[] primaryKeyFields = new int[numPrimaryKeys];
@@ -593,7 +595,7 @@
                     outputVars, keysStartIndex, numNestedSecondaryKeyFields, typeEnv, context);
             ITypeTraits[] typeTraits = JobGenHelper.variablesToTypeTraits(outputVars, keysStartIndex,
                     numNestedSecondaryKeyFields + numPrimaryKeys, typeEnv, context);
-            IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+            ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
             Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc =
                     getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
             ARecordType metaType = null;
@@ -732,7 +734,7 @@
 
             Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                     getSplitProviderAndConstraints(dataset);
-            IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+            ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
             long numElementsHint = getCardinalityPerPartitionHint(dataset);
 
             // TODO
@@ -883,8 +885,8 @@
             Map<String, String> configuration, ARecordType itemType, ARecordType metaType) throws AlgebricksException {
         try {
             configuration.put(ExternalDataConstants.KEY_DATAVERSE, dataset.getDataverseName());
-            IAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(libraryManager, adapterName,
-                    configuration, itemType, metaType);
+            IAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(
+                    getApplicationContext().getServiceContext(), adapterName, configuration, itemType, metaType);
 
             // check to see if dataset is indexed
             Index filesIndex =
@@ -969,9 +971,9 @@
                                 .getDatatype();
             }
             ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
-            LookupAdapterFactory<?> adapterFactory =
-                    AdapterFactoryProvider.getLookupAdapterFactory(libraryManager, datasetDetails.getProperties(),
-                            itemType, ridIndexes, retainInput, retainMissing, context.getMissingWriterFactory());
+            LookupAdapterFactory<?> adapterFactory = AdapterFactoryProvider.getLookupAdapterFactory(
+                    getApplicationContext().getServiceContext(), datasetDetails.getProperties(), itemType, ridIndexes,
+                    retainInput, retainMissing, context.getMissingWriterFactory());
 
             Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo;
             try {
@@ -988,7 +990,7 @@
                     fileIndex, itemType, metaType, compactionInfo.first, compactionInfo.second);
             // Create the out record descriptor, appContext and fileSplitProvider for the files index
             RecordDescriptor outRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
-            IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+            ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
             Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc;
             spPc = metadataProvider.splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(),
                     dataset.getDatasetName(), fileIndexName, false);
@@ -1059,7 +1061,7 @@
                     .getDatatype(mdTxnCtx, itemTypeDataverseName, itemTypeName).getDatatype();
             ARecordType metaItemType = DatasetUtil.getMetaType(this, dataset);
             ITypeTraits[] typeTraits = DatasetUtil.computeTupleTypeTraits(dataset, itemType, metaItemType);
-            IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+            ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
             IBinaryComparatorFactory[] comparatorFactories = DatasetUtil.computeKeysBinaryComparatorFactories(dataset,
                     itemType, metaItemType, context.getBinaryComparatorFactoryProvider());
             Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
@@ -1251,7 +1253,7 @@
             ARecordType metaItemType = DatasetUtil.getMetaType(this, dataset);
             ITypeTraits[] typeTraits = DatasetUtil.computeTupleTypeTraits(dataset, itemType, metaItemType);
 
-            IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+            ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
             IBinaryComparatorFactory[] comparatorFactories = DatasetUtil.computeKeysBinaryComparatorFactories(dataset,
                     itemType, metaItemType, context.getBinaryComparatorFactoryProvider());
             Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
@@ -1450,7 +1452,7 @@
                 ++i;
             }
 
-            IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+            ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
             Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                     getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
 
@@ -1587,7 +1589,7 @@
             ARecordType metaItemType = DatasetUtil.getMetaType(this, dataset);
             IBinaryComparatorFactory[] primaryComparatorFactories = DatasetUtil.computeKeysBinaryComparatorFactories(
                     dataset, recType, metaItemType, context.getBinaryComparatorFactoryProvider());
-            IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+            ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
             Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                     getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
             int[] btreeFields = new int[primaryComparatorFactories.length];
@@ -1808,7 +1810,7 @@
                 }
             }
 
-            IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+            ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
             Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                     getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
 
@@ -2074,4 +2076,8 @@
     public LockList getLocks() {
         return locks;
     }
+
+    public ICcApplicationContext getApplicationContext() {
+        return appCtx;
+    }
 }
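
MetadataProvider instances are now constructed with the CC application context, and downstream code (feed intake, adapter factories, split providers) reaches it through getApplicationContext() instead of AppContextInfo.INSTANCE. A construction sketch; appCtx, defaultDataverse, and componentProvider are assumed in scope:

    // Sketch: the context is injected once and then flows to the
    // feed/adapter factory calls via getApplicationContext().
    MetadataProvider metadataProvider =
            new MetadataProvider(appCtx, defaultDataverse, componentProvider);
    ICcApplicationContext ctx = metadataProvider.getApplicationContext();
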
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
index 572130d..8fbefb9 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
@@ -26,7 +26,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.active.ActiveJobNotificationHandler;
+import org.apache.asterix.active.ActiveLifecycleListener;
 import org.apache.asterix.active.IActiveEntityEventsListener;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.context.IStorageComponentProvider;
@@ -275,13 +275,16 @@
      * @throws Exception
      *             if an error occurs during the drop process or if the dataset can't be dropped for any reason
      */
-    public void drop(MetadataProvider metadataProvider, MutableObject<MetadataTransactionContext> mdTxnCtx,
-            List<JobSpecification> jobsToExecute, MutableBoolean bActiveTxn, MutableObject<ProgressState> progress,
-            IHyracksClientConnection hcc) throws Exception {
+    public void drop(MetadataProvider metadataProvider,
+            MutableObject<MetadataTransactionContext> mdTxnCtx, List<JobSpecification> jobsToExecute,
+            MutableBoolean bActiveTxn, MutableObject<ProgressState> progress, IHyracksClientConnection hcc)
+            throws Exception {
         Map<FeedConnectionId, Pair<JobSpecification, Boolean>> disconnectJobList = new HashMap<>();
         if (getDatasetType() == DatasetType.INTERNAL) {
             // prepare job spec(s) that would disconnect any active feeds involving the dataset.
-            IActiveEntityEventsListener[] activeListeners = ActiveJobNotificationHandler.INSTANCE.getEventListeners();
+            ActiveLifecycleListener activeListener =
+                    (ActiveLifecycleListener) metadataProvider.getApplicationContext().getActiveLifecycleListener();
+            IActiveEntityEventsListener[] activeListeners = activeListener.getNotificationHandler().getEventListeners();
             for (IActiveEntityEventsListener listener : activeListeners) {
                 if (listener.isEntityUsingDataset(this)) {
                     throw new CompilationException(ErrorCode.COMPILATION_CANT_DROP_ACTIVE_DATASET,
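
The same dependency-injection move applies here: Dataset.drop reaches the active-entity listeners through the provider's application context rather than the removed ActiveJobNotificationHandler.INSTANCE singleton. The navigation, isolated as a sketch with metadataProvider assumed in scope:

    // Sketch: singleton lookup replaced by context navigation.
    ActiveLifecycleListener activeListener =
            (ActiveLifecycleListener) metadataProvider.getApplicationContext().getActiveLifecycleListener();
    IActiveEntityEventsListener[] listeners =
            activeListener.getNotificationHandler().getEventListeners();
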
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
index 4598e1e..2ca7215 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
@@ -22,7 +22,6 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
-import java.io.IOException;
 
 import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
@@ -32,6 +31,7 @@
 import org.apache.asterix.om.base.ARecord;
 import org.apache.asterix.om.base.AString;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 /**
@@ -56,7 +56,7 @@
     }
 
     @Override
-    public CompactionPolicy getMetadataEntityFromTuple(ITupleReference tuple) throws IOException {
+    public CompactionPolicy getMetadataEntityFromTuple(ITupleReference tuple) throws HyracksDataException {
         byte[] serRecord = tuple.getFieldData(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = tuple.getFieldStart(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = tuple.getFieldLength(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -82,7 +82,7 @@
 
     @Override
     public ITupleReference getTupleFromMetadataEntity(CompactionPolicy compactionPolicy)
-            throws IOException, MetadataException {
+            throws HyracksDataException, MetadataException {
 
         tupleBuilder.reset();
         aString.setValue(compactionPolicy.getDataverseName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
index 138cad7..c3c5023 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
@@ -23,7 +23,6 @@
 import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.Date;
@@ -83,8 +82,8 @@
     public static final int DATASET_PAYLOAD_TUPLE_FIELD_INDEX = 2;
 
     @SuppressWarnings("unchecked")
-    protected final ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(MetadataRecordTypes.DATASET_RECORDTYPE);
+    protected final ISerializerDeserializer<ARecord> recordSerDes =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.DATASET_RECORDTYPE);
     protected final transient AMutableInt32 aInt32;
     protected final transient ISerializerDeserializer<AInt32> aInt32Serde;
     protected final transient ArrayBackedValueStorage fieldName = new ArrayBackedValueStorage();
@@ -97,7 +96,7 @@
     }
 
     @Override
-    public Dataset getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+    public Dataset getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
         byte[] serRecord = frameTuple.getFieldData(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = frameTuple.getFieldStart(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = frameTuple.getFieldLength(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -107,32 +106,38 @@
         return createDatasetFromARecord(datasetRecord);
     }
 
-    protected Dataset createDatasetFromARecord(ARecord datasetRecord) throws IOException {
+    protected Dataset createDatasetFromARecord(ARecord datasetRecord) throws HyracksDataException {
 
-        String dataverseName = ((AString) datasetRecord
-                .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
-        String datasetName = ((AString) datasetRecord
-                .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETNAME_FIELD_INDEX)).getStringValue();
-        String typeName = ((AString) datasetRecord
-                .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPENAME_FIELD_INDEX)).getStringValue();
+        String dataverseName =
+                ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATAVERSENAME_FIELD_INDEX))
+                        .getStringValue();
+        String datasetName =
+                ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETNAME_FIELD_INDEX))
+                        .getStringValue();
+        String typeName =
+                ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPENAME_FIELD_INDEX))
+                        .getStringValue();
         String typeDataverseName = ((AString) datasetRecord
                 .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPEDATAVERSENAME_FIELD_INDEX)).getStringValue();
         DatasetType datasetType = DatasetType.valueOf(
                 ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETTYPE_FIELD_INDEX))
                         .getStringValue());
         IDatasetDetails datasetDetails = null;
-        int datasetId = ((AInt32) datasetRecord
-                .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETID_FIELD_INDEX)).getIntegerValue();
-        int pendingOp = ((AInt32) datasetRecord
-                .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
-        String nodeGroupName = ((AString) datasetRecord
-                .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_GROUPNAME_FIELD_INDEX)).getStringValue();
+        int datasetId =
+                ((AInt32) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETID_FIELD_INDEX))
+                        .getIntegerValue();
+        int pendingOp =
+                ((AInt32) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_PENDINGOP_FIELD_INDEX))
+                        .getIntegerValue();
+        String nodeGroupName =
+                ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_GROUPNAME_FIELD_INDEX))
+                        .getStringValue();
         String compactionPolicy = ((AString) datasetRecord
                 .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_FIELD_INDEX)).getStringValue();
         IACursor cursor = ((AOrderedList) datasetRecord
                 .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX))
                         .getCursor();
-        Map<String, String> compactionPolicyProperties = new LinkedHashMap<String, String>();
+        Map<String, String> compactionPolicyProperties = new LinkedHashMap<>();
         String key;
         String value;
         while (cursor.next()) {
@@ -154,14 +159,14 @@
                 cursor = ((AOrderedList) datasetDetailsRecord
                         .getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX))
                                 .getCursor();
-                List<List<String>> partitioningKey = new ArrayList<List<String>>();
-                List<IAType> partitioningKeyType = new ArrayList<IAType>();
+                List<List<String>> partitioningKey = new ArrayList<>();
+                List<IAType> partitioningKeyType = new ArrayList<>();
 
                 AOrderedList fieldNameList;
                 while (cursor.next()) {
                     fieldNameList = (AOrderedList) cursor.get();
                     IACursor nestedFieldNameCursor = (fieldNameList.getCursor());
-                    List<String> nestedFieldName = new ArrayList<String>();
+                    List<String> nestedFieldName = new ArrayList<>();
                     while (nestedFieldNameCursor.next()) {
                         nestedFieldName.add(((AString) nestedFieldNameCursor.get()).getStringValue());
                     }
@@ -175,10 +180,10 @@
 
                 // Check if there is a filter field.
                 List<String> filterField = null;
-                int filterFieldPos = datasetDetailsRecord.getType()
-                        .getFieldIndex(InternalDatasetDetails.FILTER_FIELD_NAME);
+                int filterFieldPos =
+                        datasetDetailsRecord.getType().getFieldIndex(InternalDatasetDetails.FILTER_FIELD_NAME);
                 if (filterFieldPos >= 0) {
-                    filterField = new ArrayList<String>();
+                    filterField = new ArrayList<>();
                     cursor = ((AOrderedList) datasetDetailsRecord.getValueByPos(filterFieldPos)).getCursor();
                     while (cursor.next()) {
                         filterField.add(((AString) cursor.get()).getStringValue());
@@ -218,7 +223,7 @@
                 cursor = ((AOrderedList) datasetDetailsRecord
                         .getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX))
                                 .getCursor();
-                Map<String, String> properties = new HashMap<String, String>();
+                Map<String, String> properties = new HashMap<>();
                 while (cursor.next()) {
                     ARecord field = (ARecord) cursor.get();
                     key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX))
@@ -233,10 +238,9 @@
                         .getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_LAST_REFRESH_TIME_FIELD_INDEX)))
                                 .getChrononTime());
                 // State
-                TransactionState state = TransactionState
-                        .values()[((AInt32) datasetDetailsRecord.getValueByPos(
-                                MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_TRANSACTION_STATE_FIELD_INDEX))
-                                        .getIntegerValue()];
+                TransactionState state = TransactionState.values()[((AInt32) datasetDetailsRecord
+                        .getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_TRANSACTION_STATE_FIELD_INDEX))
+                                .getIntegerValue()];
 
                 datasetDetails = new ExternalDatasetDetails(adapter, properties, timestamp, state);
         }
@@ -245,11 +249,11 @@
 
         String metaTypeDataverseName = null;
         String metaTypeName = null;
-        int metaTypeDataverseNameIndex = datasetRecord.getType()
-                .getFieldIndex(MetadataRecordTypes.FIELD_NAME_METADATA_DATAVERSE);
+        int metaTypeDataverseNameIndex =
+                datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METADATA_DATAVERSE);
         if (metaTypeDataverseNameIndex >= 0) {
-            metaTypeDataverseName = ((AString) datasetRecord.getValueByPos(metaTypeDataverseNameIndex))
-                    .getStringValue();
+            metaTypeDataverseName =
+                    ((AString) datasetRecord.getValueByPos(metaTypeDataverseNameIndex)).getStringValue();
             int metaTypeNameIndex = datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METATYPE_NAME);
             metaTypeName = ((AString) datasetRecord.getValueByPos(metaTypeNameIndex)).getStringValue();
         }
@@ -260,7 +264,7 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Dataset dataset) throws IOException, MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(Dataset dataset) throws HyracksDataException, MetadataException {
         OrderedListBuilder listBuilder = new OrderedListBuilder();
         ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
         // write the key in the first 2 fields of the tuple
@@ -423,11 +427,11 @@
     }
 
     protected Map<String, String> getDatasetHints(ARecord datasetRecord) {
-        Map<String, String> hints = new HashMap<String, String>();
+        Map<String, String> hints = new HashMap<>();
         String key;
         String value;
-        AUnorderedList list = (AUnorderedList) datasetRecord
-                .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_HINTS_FIELD_INDEX);
+        AUnorderedList list =
+                (AUnorderedList) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_HINTS_FIELD_INDEX);
         IACursor cursor = list.getCursor();
         while (cursor.next()) {
             ARecord field = (ARecord) cursor.get();
@@ -444,8 +448,8 @@
         ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
         propertyRecordBuilder.reset(MetadataRecordTypes.DATASET_HINTS_RECORDTYPE);
         AMutableString aString = new AMutableString("");
-        ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
-                .getSerializerDeserializer(BuiltinType.ASTRING);
+        ISerializerDeserializer<AString> stringSerde =
+                SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
 
         // write field 0
         fieldValue.reset();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
index 44de381..7f4e28d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
@@ -22,7 +22,6 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
-import java.io.IOException;
 import java.util.Calendar;
 
 import org.apache.asterix.external.api.IDataSourceAdapter;
@@ -35,6 +34,7 @@
 import org.apache.asterix.om.base.ARecord;
 import org.apache.asterix.om.base.AString;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 public class DatasourceAdapterTupleTranslator extends AbstractTupleTranslator<DatasourceAdapter> {
@@ -57,7 +57,8 @@
     }
 
     @Override
-    public DatasourceAdapter getMetadataEntityFromTuple(ITupleReference tuple) throws MetadataException, IOException {
+    public DatasourceAdapter getMetadataEntityFromTuple(ITupleReference tuple)
+            throws MetadataException, HyracksDataException {
         byte[] serRecord = tuple.getFieldData(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = tuple.getFieldStart(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = tuple.getFieldLength(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -84,7 +85,8 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(DatasourceAdapter adapter) throws IOException, MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(DatasourceAdapter adapter)
+            throws HyracksDataException, MetadataException {
         // write the key in the first 2 fields of the tuple
         tupleBuilder.reset();
         aString.setValue(adapter.getAdapterIdentifier().getNamespace());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
index b81ec29..d202e1a 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
@@ -23,7 +23,6 @@
 import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
-import java.io.IOException;
 import java.rmi.RemoteException;
 import java.util.Calendar;
 
@@ -53,10 +52,10 @@
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.utils.NonTaggedFormatUtil;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
 
 /**
  * Translates a Datatype metadata entity to an ITupleReference and vice versa.
@@ -77,8 +76,8 @@
     };
 
     @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(MetadataRecordTypes.DATATYPE_RECORDTYPE);
+    private ISerializerDeserializer<ARecord> recordSerDes =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.DATATYPE_RECORDTYPE);
     private final MetadataNode metadataNode;
     private final JobId jobId;
 
@@ -89,7 +88,8 @@
     }
 
     @Override
-    public Datatype getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, IOException {
+    public Datatype getMetadataEntityFromTuple(ITupleReference frameTuple)
+            throws MetadataException, HyracksDataException {
         byte[] serRecord = frameTuple.getFieldData(DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = frameTuple.getFieldStart(DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = frameTuple.getFieldLength(DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -142,8 +142,8 @@
                         boolean isNullable = ((ABoolean) field
                                 .getValueByPos(MetadataRecordTypes.FIELD_ARECORD_ISNULLABLE_FIELD_INDEX)).getBoolean()
                                         .booleanValue();
-                        fieldTypes[fieldId] = BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId,
-                                dataverseName, fieldTypeName, isNullable);
+                        fieldTypes[fieldId] = BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId, dataverseName,
+                                fieldTypeName, isNullable);
                         fieldId++;
                     }
                     return new Datatype(dataverseName, datatypeName,
@@ -163,8 +163,8 @@
                             .getValueByPos(MetadataRecordTypes.DERIVEDTYPE_ARECORD_ORDEREDLIST_FIELD_INDEX))
                                     .getStringValue();
                     return new Datatype(dataverseName, datatypeName,
-                            new AOrderedListType(BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId,
-                                    dataverseName, orderedlistTypeName, false), datatypeName),
+                            new AOrderedListType(BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId, dataverseName,
+                                    orderedlistTypeName, false), datatypeName),
                             isAnonymous);
                 }
                 default:
@@ -175,7 +175,8 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Datatype dataType) throws IOException, MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(Datatype dataType)
+            throws HyracksDataException, MetadataException {
         // write the key in the first two fields of the tuple
         tupleBuilder.reset();
         aString.setValue(dataType.getDataverseName());
@@ -368,8 +369,13 @@
         } catch (MetadataException e) {
             // The nested record type may have been inserted by a previous DDL statement or by
             // a previous nested type.
-            if (!(e.getCause() instanceof TreeIndexDuplicateKeyException)) {
-                throw new HyracksDataException(e);
+            if (!(e.getCause() instanceof HyracksDataException)) {
+                throw HyracksDataException.create(e);
+            } else {
+                HyracksDataException hde = (HyracksDataException) e.getCause();
+                if (!hde.getComponent().equals(ErrorCode.HYRACKS) || hde.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw hde;
+                }
             }
         } catch (RemoteException e) {
             // TODO: This should not be a HyracksDataException. Can't
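
Here a duplicate key is expected and tolerated (the nested type may already have been inserted by an earlier DDL statement), so the check inverts: rethrow unless the cause is the Hyracks duplicate-key error. A hypothetical predicate capturing that test:

    // Hypothetical helper mirroring the logic above: true when the
    // MetadataException wraps the Hyracks DUPLICATE_KEY error and the
    // insert can be treated as a no-op.
    private static boolean isDuplicateKey(MetadataException e) {
        Throwable cause = e.getCause();
        if (!(cause instanceof HyracksDataException)) {
            return false;
        }
        HyracksDataException hde = (HyracksDataException) cause;
        return hde.getComponent().equals(ErrorCode.HYRACKS)
                && hde.getErrorCode() == ErrorCode.DUPLICATE_KEY;
    }
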
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
index ac4fde2..89aaad3 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
@@ -22,7 +22,6 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
-import java.io.IOException;
 import java.util.Calendar;
 
 import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
@@ -36,6 +35,7 @@
 import org.apache.asterix.om.base.AString;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 /**
@@ -52,8 +52,8 @@
     protected ISerializerDeserializer<AInt32> aInt32Serde;
 
     @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(MetadataRecordTypes.DATAVERSE_RECORDTYPE);
+    private ISerializerDeserializer<ARecord> recordSerDes =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.DATAVERSE_RECORDTYPE);
 
     @SuppressWarnings("unchecked")
     protected DataverseTupleTranslator(boolean getTuple) {
@@ -63,7 +63,7 @@
     }
 
     @Override
-    public Dataverse getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+    public Dataverse getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
         byte[] serRecord = frameTuple.getFieldData(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = frameTuple.getFieldStart(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = frameTuple.getFieldLength(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -76,7 +76,8 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Dataverse instance) throws IOException, MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(Dataverse instance)
+            throws HyracksDataException, MetadataException {
         // write the key in the first field of the tuple
         tupleBuilder.reset();
         aString.setValue(instance.getDataverseName());
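
This file and the translator files that follow all make the same signature change: java.io.IOException disappears from both translator methods in favor of HyracksDataException. The contract they now satisfy looks roughly like the sketch below; the interface declaration is inferred from the signatures in these hunks, not quoted from the patch:

    // Inferred shape of the tuple-translator contract after this
    // change: failures in either direction surface as
    // HyracksDataException (plus MetadataException on paths that can
    // hit the metadata layer), so callers need not handle raw
    // IOException.
    public interface IMetadataEntityTupleTranslator<T> {

        /** Deserialize a stored metadata tuple back into an entity. */
        T getMetadataEntityFromTuple(ITupleReference tuple) throws MetadataException, HyracksDataException;

        /** Serialize an entity into a tuple for the metadata index. */
        ITupleReference getTupleFromMetadataEntity(T metadataEntity) throws MetadataException, HyracksDataException;
    }
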
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
index 66b6af3..ea04f1d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
@@ -21,7 +21,6 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
-import java.io.IOException;
 import java.util.Date;
 
 import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
@@ -40,6 +39,7 @@
 import org.apache.asterix.om.base.AString;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 public class ExternalFileTupleTranslator extends AbstractTupleTranslator<ExternalFile> {
@@ -58,14 +58,14 @@
     protected transient AMutableInt64 aInt64 = new AMutableInt64(0);
 
     @SuppressWarnings("unchecked")
-    protected ISerializerDeserializer<AInt32> intSerde = SerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.AINT32);
+    protected ISerializerDeserializer<AInt32> intSerde =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
     @SuppressWarnings("unchecked")
-    protected ISerializerDeserializer<ADateTime> dateTimeSerde = SerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.ADATETIME);
+    protected ISerializerDeserializer<ADateTime> dateTimeSerde =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
     @SuppressWarnings("unchecked")
-    protected ISerializerDeserializer<AInt64> longSerde = SerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.AINT64);
+    protected ISerializerDeserializer<AInt64> longSerde =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
     @SuppressWarnings("unchecked")
     private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
             .getSerializerDeserializer(MetadataRecordTypes.EXTERNAL_FILE_RECORDTYPE);
@@ -75,7 +75,8 @@
     }
 
     @Override
-    public ExternalFile getMetadataEntityFromTuple(ITupleReference tuple) throws MetadataException, IOException {
+    public ExternalFile getMetadataEntityFromTuple(ITupleReference tuple)
+            throws MetadataException, HyracksDataException {
         byte[] serRecord = tuple.getFieldData(EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = tuple.getFieldStart(EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = tuple.getFieldLength(EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -106,7 +107,8 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(ExternalFile externalFile) throws MetadataException, IOException {
+    public ITupleReference getTupleFromMetadataEntity(ExternalFile externalFile)
+            throws MetadataException, HyracksDataException {
         // write the key in the first 3 fields of the tuple
         tupleBuilder.reset();
         // dataverse name
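
Every getMetadataEntityFromTuple in this diff follows the same read path, which is what makes the narrowed throws clause safe: the only checked failure left is the record serde's HyracksDataException. A condensed sketch of that shared path, using the names from the hunks above:

    // Shared read-side pattern: slice the payload field out of the
    // metadata tuple, wrap it in a stream, and let the record serde
    // rebuild the ARecord. recordSerDes is the per-translator field
    // declared earlier in each file.
    byte[] serRecord = tuple.getFieldData(EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = tuple.getFieldStart(EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = tuple.getFieldLength(EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord externalFileRecord = recordSerDes.deserialize(in);
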
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedConnectionTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedConnectionTupleTranslator.java
index 0adcda5..420e4fc 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedConnectionTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedConnectionTupleTranslator.java
@@ -19,6 +19,12 @@
 
 package org.apache.asterix.metadata.entitytupletranslators;
 
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.asterix.builders.IARecordBuilder;
 import org.apache.asterix.builders.UnorderedListBuilder;
 import org.apache.asterix.common.functions.FunctionSignature;
@@ -27,20 +33,18 @@
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
 import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
 import org.apache.asterix.metadata.entities.FeedConnection;
-import org.apache.asterix.om.base.*;
+import org.apache.asterix.om.base.AMissing;
+import org.apache.asterix.om.base.ANull;
+import org.apache.asterix.om.base.ARecord;
+import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.base.AUnorderedList;
+import org.apache.asterix.om.base.IACursor;
 import org.apache.asterix.om.types.AUnorderedListType;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
 public class FeedConnectionTupleTranslator extends AbstractTupleTranslator<FeedConnection> {
 
     public static final int FEED_CONN_DATAVERSE_NAME_FIELD_INDEX = 0;
@@ -57,7 +61,8 @@
     }
 
     @Override
-    public FeedConnection getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, IOException {
+    public FeedConnection getMetadataEntityFromTuple(ITupleReference frameTuple)
+            throws MetadataException, HyracksDataException {
         byte[] serRecord = frameTuple.getFieldData(FEED_CONN_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = frameTuple.getFieldStart(FEED_CONN_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = frameTuple.getFieldLength(FEED_CONN_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -68,12 +73,14 @@
     }
 
     private FeedConnection createFeedConnFromRecord(ARecord feedConnRecord) {
-        String dataverseName = ((AString) feedConnRecord
-                .getValueByPos(MetadataRecordTypes.FEED_CONN_DATAVERSE_NAME_FIELD_INDEX)).getStringValue();
+        String dataverseName =
+                ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_DATAVERSE_NAME_FIELD_INDEX))
+                        .getStringValue();
         String feedName = ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_FEED_NAME_FIELD_INDEX))
                 .getStringValue();
-        String datasetName = ((AString) feedConnRecord
-                .getValueByPos(MetadataRecordTypes.FEED_CONN_DATASET_NAME_FIELD_INDEX)).getStringValue();
+        String datasetName =
+                ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_DATASET_NAME_FIELD_INDEX))
+                        .getStringValue();
         String outputType = ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_OUTPUT_TYPE_INDEX))
                 .getStringValue();
         String policyName = ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_POLICY_FIELD_INDEX))
@@ -98,7 +105,8 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(FeedConnection me) throws MetadataException, IOException {
+    public ITupleReference getTupleFromMetadataEntity(FeedConnection me)
+            throws MetadataException, HyracksDataException {
         tupleBuilder.reset();
 
         // key: dataverse
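
createFeedConnFromRecord shows the read-side idiom for individual fields: look a value up by its position constant in MetadataRecordTypes, cast to the concrete object-model type, then unwrap the Java value. Two representative lines, condensed from the hunk above:

    // Field extraction idiom: the cast target (AString here) must
    // match the field type declared in the record type.
    String dataverseName =
            ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_DATAVERSE_NAME_FIELD_INDEX))
                    .getStringValue();
    String policyName =
            ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_POLICY_FIELD_INDEX))
                    .getStringValue();
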
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
index c82a073..7b631b6 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
@@ -23,7 +23,6 @@
 import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
-import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -73,7 +72,7 @@
     }
 
     @Override
-    public FeedPolicyEntity getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+    public FeedPolicyEntity getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
         byte[] serRecord = frameTuple.getFieldData(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = frameTuple.getFieldStart(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = frameTuple.getFieldLength(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -95,7 +94,7 @@
 
         IACursor cursor = ((AUnorderedList) feedPolicyRecord
                 .getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX)).getCursor();
-        Map<String, String> policyParamters = new HashMap<String, String>();
+        Map<String, String> policyParamters = new HashMap<>();
         String key;
         String value;
         while (cursor.next()) {
@@ -111,7 +110,7 @@
 
     @Override
     public ITupleReference getTupleFromMetadataEntity(FeedPolicyEntity feedPolicy)
-            throws IOException, MetadataException {
+            throws HyracksDataException, MetadataException {
         // write the key in the first three fields of the tuple
         ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
 
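
The policy's properties field is an AUnorderedList of name/value records that the method drains into a plain HashMap (the hunk keeps the source's original spelling, policyParamters). A sketch of that loop; the name-at-0/value-at-1 positions are an assumption based on similar metadata property records, and the real code uses named constants:

    // Drain the property list into a Java map; positions 0 (name) and
    // 1 (value) are assumed here.
    Map<String, String> policyParams = new HashMap<>();
    IACursor cursor = ((AUnorderedList) feedPolicyRecord
            .getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX)).getCursor();
    while (cursor.next()) {
        ARecord property = (ARecord) cursor.get();
        String key = ((AString) property.getValueByPos(0)).getStringValue();
        String value = ((AString) property.getValueByPos(1)).getStringValue();
        policyParams.put(key, value);
    }
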
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
index 4503e09..4737d79 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
@@ -23,7 +23,6 @@
 import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
-import java.io.IOException;
 import java.util.Calendar;
 import java.util.HashMap;
 import java.util.Map;
@@ -62,15 +61,15 @@
     public static final int FEED_PAYLOAD_TUPLE_FIELD_INDEX = 2;
 
     @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(MetadataRecordTypes.FEED_RECORDTYPE);
+    private ISerializerDeserializer<ARecord> recordSerDes =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.FEED_RECORDTYPE);
 
     protected FeedTupleTranslator(boolean getTuple) {
         super(getTuple, MetadataPrimaryIndexes.FEED_DATASET.getFieldCount());
     }
 
     @Override
-    public Feed getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+    public Feed getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
         byte[] serRecord = frameTuple.getFieldData(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = frameTuple.getFieldStart(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = frameTuple.getFieldLength(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -82,15 +81,16 @@
 
     private Feed createFeedFromARecord(ARecord feedRecord) {
         Feed feed;
-        String dataverseName = ((AString) feedRecord
-                .getValueByPos(MetadataRecordTypes.FEED_ARECORD_DATAVERSE_NAME_FIELD_INDEX)).getStringValue();
+        String dataverseName =
+                ((AString) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_DATAVERSE_NAME_FIELD_INDEX))
+                        .getStringValue();
         String feedName = ((AString) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_FEED_NAME_FIELD_INDEX))
                 .getStringValue();
 
-        AUnorderedList feedConfig = (AUnorderedList) feedRecord
-                .getValueByPos(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIG_INDEX);
-        String adapterName = ((AString) feedRecord
-                .getValueByPos(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_NAME_INDEX)).getStringValue();
+        AUnorderedList feedConfig =
+                (AUnorderedList) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIG_INDEX);
+        String adapterName = ((AString) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_NAME_INDEX))
+                .getStringValue();
 
         IACursor cursor = feedConfig.getCursor();
 
@@ -109,7 +109,7 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Feed feed) throws IOException, MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(Feed feed) throws HyracksDataException, MetadataException {
         // write the key in the first two fields of the tuple
         tupleBuilder.reset();
         aString.setValue(feed.getDataverseName());
@@ -182,8 +182,8 @@
         ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
         propertyRecordBuilder.reset(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE);
         AMutableString aString = new AMutableString("");
-        ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
-                .getSerializerDeserializer(BuiltinType.ASTRING);
+        ISerializerDeserializer<AString> stringSerde =
+                SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
 
         // write field 0
         fieldValue.reset();
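
The last hunk in this file shows the write-side counterpart: a reusable AMutableString plus a string serde produce each field's bytes before they are handed to a record builder. A condensed sketch of writing one field; treating field 0 as the property name is an assumption, as is the propertyName variable:

    // Write-side idiom: reset the reusable storage, set the mutable
    // value, serialize into the storage's DataOutput, then add the
    // bytes to the record builder at the target field position.
    ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
    AMutableString aString = new AMutableString("");
    ISerializerDeserializer<AString> stringSerde =
            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);

    fieldValue.reset();
    aString.setValue(propertyName); // propertyName: the key being written (hypothetical)
    stringSerde.serialize(aString, fieldValue.getDataOutput());
    propertyRecordBuilder.addField(0, fieldValue);
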
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
index 1487f02..64c4035 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
@@ -22,7 +22,6 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -38,6 +37,7 @@
 import org.apache.asterix.om.base.IACursor;
 import org.apache.asterix.om.types.AOrderedListType;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
@@ -57,15 +57,15 @@
     public static final int FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX = 3;
 
     @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(MetadataRecordTypes.FUNCTION_RECORDTYPE);
+    private ISerializerDeserializer<ARecord> recordSerDes =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.FUNCTION_RECORDTYPE);
 
     protected FunctionTupleTranslator(boolean getTuple) {
         super(getTuple, MetadataPrimaryIndexes.FUNCTION_DATASET.getFieldCount());
     }
 
     @Override
-    public Function getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+    public Function getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
         byte[] serRecord = frameTuple.getFieldData(FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = frameTuple.getFieldStart(FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = frameTuple.getFieldLength(FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -87,7 +87,7 @@
 
         IACursor cursor = ((AOrderedList) functionRecord
                 .getValueByPos(MetadataRecordTypes.FUNCTION_ARECORD_FUNCTION_PARAM_LIST_FIELD_INDEX)).getCursor();
-        List<String> params = new ArrayList<String>();
+        List<String> params = new ArrayList<>();
         while (cursor.next()) {
             params.add(((AString) cursor.get()).getStringValue());
         }
@@ -110,7 +110,8 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Function function) throws IOException, MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(Function function)
+            throws HyracksDataException, MetadataException {
         // write the key in the first 2 fields of the tuple
         tupleBuilder.reset();
         aString.setValue(function.getDataverseName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
index 6446e67..f29c7f7 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
@@ -22,7 +22,7 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
-import java.io.IOException;
+import java.rmi.RemoteException;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.List;
@@ -54,6 +54,7 @@
 import org.apache.asterix.om.types.IAType;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
@@ -103,7 +104,7 @@
     }
 
     @Override
-    public Index getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, IOException {
+    public Index getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, HyracksDataException {
         byte[] serRecord = frameTuple.getFieldData(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = frameTuple.getFieldStart(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = frameTuple.getFieldLength(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -114,14 +115,13 @@
                 .getStringValue();
         String dsName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATASETNAME_FIELD_INDEX))
                 .getStringValue();
-        String indexName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX))
-                .getStringValue();
+        String indexName =
+                ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX)).getStringValue();
         IndexType indexStructure = IndexType
                 .valueOf(((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXSTRUCTURE_FIELD_INDEX))
                         .getStringValue());
         IACursor fieldNameCursor =
-                ((AOrderedList) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX))
-                        .getCursor();
+                ((AOrderedList) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX)).getCursor();
         List<List<String>> searchKey = new ArrayList<>();
         AOrderedList fieldNameList;
         while (fieldNameCursor.next()) {
@@ -152,8 +152,8 @@
         }
         Boolean isPrimaryIndex =
                 ((ABoolean) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_ISPRIMARY_FIELD_INDEX)).getBoolean();
-        int pendingOp = ((AInt32) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX))
-                .getIntegerValue();
+        int pendingOp =
+                ((AInt32) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
         // Check if there is a gram length as well.
         int gramLength = -1;
         int gramLenPos = rec.getType().getFieldIndex(GRAM_LENGTH_FIELD_NAME);
@@ -177,22 +177,26 @@
 
         // index key type information is not persisted, thus we extract type information from the record metadata
         if (searchKeyType.isEmpty()) {
-            Dataset dSet = metadataNode.getDataset(jobId, dvName, dsName);
-            String datatypeName = dSet.getItemTypeName();
-            String datatypeDataverseName = dSet.getItemTypeDataverseName();
-            ARecordType recordDt =
-                    (ARecordType) metadataNode.getDatatype(jobId, datatypeDataverseName, datatypeName).getDatatype();
-            String metatypeName = dSet.getMetaItemTypeName();
-            String metatypeDataverseName = dSet.getMetaItemTypeDataverseName();
-            ARecordType metaDt = null;
-            if (metatypeName != null && metatypeDataverseName != null) {
-                metaDt = (ARecordType) metadataNode.getDatatype(jobId, metatypeDataverseName, metatypeName)
-                        .getDatatype();
-            }
             try {
-                searchKeyType = KeyFieldTypeUtil.getKeyTypes(recordDt, metaDt, searchKey, keyFieldSourceIndicator);
-            } catch (AlgebricksException e) {
-                throw new MetadataException(e);
+                Dataset dSet = metadataNode.getDataset(jobId, dvName, dsName);
+                String datatypeName = dSet.getItemTypeName();
+                String datatypeDataverseName = dSet.getItemTypeDataverseName();
+                ARecordType recordDt = (ARecordType) metadataNode
+                        .getDatatype(jobId, datatypeDataverseName, datatypeName).getDatatype();
+                String metatypeName = dSet.getMetaItemTypeName();
+                String metatypeDataverseName = dSet.getMetaItemTypeDataverseName();
+                ARecordType metaDt = null;
+                if (metatypeName != null && metatypeDataverseName != null) {
+                    metaDt = (ARecordType) metadataNode.getDatatype(jobId, metatypeDataverseName, metatypeName)
+                            .getDatatype();
+                }
+                try {
+                    searchKeyType = KeyFieldTypeUtil.getKeyTypes(recordDt, metaDt, searchKey, keyFieldSourceIndicator);
+                } catch (AlgebricksException e) {
+                    throw new MetadataException(e);
+                }
+            } catch (RemoteException re) {
+                throw HyracksDataException.create(re);
             }
         }
         return new Index(dvName, dsName, indexName, indexStructure, searchKey, keyFieldSourceIndicator, searchKeyType,
@@ -200,7 +204,7 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Index instance) throws IOException {
+    public ITupleReference getTupleFromMetadataEntity(Index instance) throws HyracksDataException {
         // write the key in the first 3 fields of the tuple
         tupleBuilder.reset();
         aString.setValue(instance.getDataverseName());
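
Because getMetadataEntityFromTuple no longer declares IOException, the remote metadata lookups moved inside the new try block must convert java.rmi.RemoteException (an IOException subtype) themselves. The pattern, extracted from the hunk:

    // RemoteException can no longer propagate as IOException, so it is
    // wrapped explicitly; HyracksDataException.create preserves it as
    // the cause.
    try {
        Dataset dSet = metadataNode.getDataset(jobId, dvName, dsName);
        // ... resolve the record/meta types and the key field types ...
    } catch (RemoteException re) {
        throw HyracksDataException.create(re);
    }
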
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
index dfb8ced..1d8cff8 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
@@ -22,7 +22,6 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
-import java.io.IOException;
 import java.util.Calendar;
 
 import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
@@ -33,6 +32,7 @@
 import org.apache.asterix.om.base.ARecord;
 import org.apache.asterix.om.base.AString;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 /**
@@ -49,15 +49,15 @@
     public static final int LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX = 2;
 
     @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(MetadataRecordTypes.LIBRARY_RECORDTYPE);
+    private ISerializerDeserializer<ARecord> recordSerDes =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.LIBRARY_RECORDTYPE);
 
     protected LibraryTupleTranslator(boolean getTuple) {
         super(getTuple, MetadataPrimaryIndexes.LIBRARY_DATASET.getFieldCount());
     }
 
     @Override
-    public Library getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+    public Library getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
         byte[] serRecord = frameTuple.getFieldData(LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = frameTuple.getFieldStart(LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = frameTuple.getFieldLength(LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -79,7 +79,7 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Library library) throws IOException, MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(Library library) throws HyracksDataException, MetadataException {
         // write the key in the first 2 fields of the tuple
         tupleBuilder.reset();
         aString.setValue(library.getDataverseName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
index 6e865d05..c2cdf0d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
@@ -22,7 +22,6 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.List;
@@ -39,6 +38,7 @@
 import org.apache.asterix.om.base.IACursor;
 import org.apache.asterix.om.types.AUnorderedListType;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
@@ -56,28 +56,28 @@
 
     private transient UnorderedListBuilder listBuilder = new UnorderedListBuilder();
     private transient ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
-    private List<String> nodeNames;
     @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(MetadataRecordTypes.NODEGROUP_RECORDTYPE);
+    private ISerializerDeserializer<ARecord> recordSerDes =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.NODEGROUP_RECORDTYPE);
 
     protected NodeGroupTupleTranslator(boolean getTuple) {
         super(getTuple, MetadataPrimaryIndexes.NODEGROUP_DATASET.getFieldCount());
     }
 
     @Override
-    public NodeGroup getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+    public NodeGroup getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
         byte[] serRecord = frameTuple.getFieldData(NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = frameTuple.getFieldStart(NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = frameTuple.getFieldLength(NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
         ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
         DataInput in = new DataInputStream(stream);
         ARecord nodeGroupRecord = recordSerDes.deserialize(in);
-        String gpName = ((AString) nodeGroupRecord
-                .getValueByPos(MetadataRecordTypes.NODEGROUP_ARECORD_GROUPNAME_FIELD_INDEX)).getStringValue();
+        String gpName =
+                ((AString) nodeGroupRecord.getValueByPos(MetadataRecordTypes.NODEGROUP_ARECORD_GROUPNAME_FIELD_INDEX))
+                        .getStringValue();
         IACursor cursor = ((AUnorderedList) nodeGroupRecord
                 .getValueByPos(MetadataRecordTypes.NODEGROUP_ARECORD_NODENAMES_FIELD_INDEX)).getCursor();
-        List<String> nodeNames = new ArrayList<String>();
+        List<String> nodeNames = new ArrayList<>();
         while (cursor.next()) {
             nodeNames.add(((AString) cursor.get()).getStringValue());
         }
@@ -85,7 +85,8 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(NodeGroup instance) throws IOException, MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(NodeGroup instance)
+            throws HyracksDataException, MetadataException {
         // write the key in the first field of the tuple
         tupleBuilder.reset();
         aString.setValue(instance.getNodeGroupName());
@@ -103,8 +104,8 @@
         // write field 1
         listBuilder.reset((AUnorderedListType) MetadataRecordTypes.NODEGROUP_RECORDTYPE
                 .getFieldTypes()[MetadataRecordTypes.NODEGROUP_ARECORD_NODENAMES_FIELD_INDEX]);
-        this.nodeNames = instance.getNodeNames();
-        for (String nodeName : this.nodeNames) {
+        List<String> nodeNames = instance.getNodeNames();
+        for (String nodeName : nodeNames) {
             itemValue.reset();
             aString.setValue(nodeName);
             stringSerde.serialize(aString, itemValue.getDataOutput());
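
These hunks also demote nodeNames from an instance field to a local variable, so the translator carries no state from one translation into the next. The rewritten loop, sketched with the transient builder fields declared above (the addItem call is how the list builder is assumed to consume each serialized element):

    // nodeNames is now a local: nothing written during one call can
    // leak into a later call on a reused translator instance.
    List<String> nodeNames = instance.getNodeNames();
    for (String nodeName : nodeNames) {
        itemValue.reset();
        aString.setValue(nodeName);
        stringSerde.serialize(aString, itemValue.getDataOutput());
        listBuilder.addItem(itemValue);
    }
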
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
index 7856ed8..dae11bc 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
@@ -19,8 +19,6 @@
 
 package org.apache.asterix.metadata.entitytupletranslators;
 
-import java.io.IOException;
-
 import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
@@ -31,6 +29,7 @@
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 /**
@@ -46,8 +45,8 @@
 
     private transient AMutableInt64 aInt64 = new AMutableInt64(-1);
     @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.AINT64);
+    private ISerializerDeserializer<AInt64> int64Serde =
+            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
 
     // @SuppressWarnings("unchecked")
     // private ISerializerDeserializer<ARecord> recordSerDes =
@@ -59,7 +58,7 @@
     }
 
     @Override
-    public Node getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+    public Node getMetadataEntityFromTuple(ITupleReference frameTuple) {
         throw new NotImplementedException();
         // TODO: Implement this.
         // try {
@@ -85,7 +84,7 @@
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Node instance) throws IOException, MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(Node instance) throws HyracksDataException, MetadataException {
         // write the key in the first field of the tuple
         tupleBuilder.reset();
         aString.setValue(instance.getNodeName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
index b54c9e6..18e4676 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
@@ -22,11 +22,11 @@
 import java.util.Map;
 
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
-import org.apache.asterix.common.library.ILibraryManager;
 import org.apache.asterix.external.api.IAdapterFactory;
 import org.apache.asterix.external.api.IDataSourceAdapter;
 import org.apache.asterix.external.api.IDataSourceAdapter.AdapterType;
@@ -95,7 +95,7 @@
         return feedPolicy;
     }
 
-    public static void validateFeed(Feed feed, MetadataTransactionContext mdTxnCtx, ILibraryManager libraryManager)
+    public static void validateFeed(Feed feed, MetadataTransactionContext mdTxnCtx, ICcApplicationContext appCtx)
             throws AsterixException {
         try {
             String adapterName = feed.getAdapterName();
@@ -122,7 +122,8 @@
                     case EXTERNAL:
                         String[] anameComponents = adapterName.split("#");
                         String libraryName = anameComponents[0];
-                        ClassLoader cl = libraryManager.getLibraryClassLoader(feed.getDataverseName(), libraryName);
+                        ClassLoader cl =
+                                appCtx.getLibraryManager().getLibraryClassLoader(feed.getDataverseName(), libraryName);
                         adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
                         break;
                     default:
@@ -130,10 +131,10 @@
                 }
                 adapterFactory.setOutputType(adapterOutputType);
                 adapterFactory.setMetaType(metaType);
-                adapterFactory.configure(null, configuration);
+                adapterFactory.configure(appCtx.getServiceContext(), configuration);
             } else {
-                AdapterFactoryProvider.getAdapterFactory(libraryManager, adapterName, configuration, adapterOutputType,
-                        metaType);
+                AdapterFactoryProvider.getAdapterFactory(appCtx.getServiceContext(), adapterName, configuration,
+                        adapterOutputType, metaType);
             }
             if (metaType == null && configuration.containsKey(ExternalDataConstants.KEY_META_TYPE_NAME)) {
                 metaType = getOutputType(feed, configuration, ExternalDataConstants.KEY_META_TYPE_NAME);
@@ -159,7 +160,7 @@
 
     @SuppressWarnings("rawtypes")
     public static Triple<IAdapterFactory, RecordDescriptor, AdapterType> getPrimaryFeedFactoryAndOutput(Feed feed,
-            FeedPolicyAccessor policyAccessor, MetadataTransactionContext mdTxnCtx, ILibraryManager libraryManager)
+            FeedPolicyAccessor policyAccessor, MetadataTransactionContext mdTxnCtx, ICcApplicationContext appCtx)
             throws AlgebricksException {
         // This method needs to be re-visited
         String adapterName = null;
@@ -194,7 +195,8 @@
                     case EXTERNAL:
                         String[] anameComponents = adapterName.split("#");
                         String libraryName = anameComponents[0];
-                        ClassLoader cl = libraryManager.getLibraryClassLoader(feed.getDataverseName(), libraryName);
+                        ClassLoader cl =
+                                appCtx.getLibraryManager().getLibraryClassLoader(feed.getDataverseName(), libraryName);
                         adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
                         break;
                     default:
@@ -202,10 +204,10 @@
                 }
                 adapterFactory.setOutputType(adapterOutputType);
                 adapterFactory.setMetaType(metaType);
-                adapterFactory.configure(null, configuration);
+                adapterFactory.configure(appCtx.getServiceContext(), configuration);
             } else {
-                adapterFactory = AdapterFactoryProvider.getAdapterFactory(libraryManager, adapterName, configuration,
-                        adapterOutputType, metaType);
+                adapterFactory = AdapterFactoryProvider.getAdapterFactory(appCtx.getServiceContext(), adapterName,
+                        configuration, adapterOutputType, metaType);
                 adapterType = IDataSourceAdapter.AdapterType.INTERNAL;
             }
             if (metaType == null) {
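
validateFeed and getPrimaryFeedFactoryAndOutput now take an ICcApplicationContext instead of a bare ILibraryManager: the context supplies both the library manager and the service context, so adapter factories stop being configured with null. A condensed sketch of the external-adapter branch after this change, using the names from the hunks:

    // External (UDF-library) adapters: resolve the library class
    // loader through the application context, instantiate the factory
    // reflectively, and configure it with a real service context.
    String[] anameComponents = adapterName.split("#");
    String libraryName = anameComponents[0];
    ClassLoader cl = appCtx.getLibraryManager().getLibraryClassLoader(feed.getDataverseName(), libraryName);
    IAdapterFactory adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
    adapterFactory.setOutputType(adapterOutputType);
    adapterFactory.setMetaType(metaType);
    adapterFactory.configure(appCtx.getServiceContext(), configuration);
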
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
index ca56cc3..6cb4a4f 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
@@ -320,18 +320,18 @@
         return null;
     }
 
-    public static JobSpecification createDropDatasetJobSpec(Dataset dataset, Index primaryIndex,
-            MetadataProvider metadataProvider)
+    public static JobSpecification createDropDatasetJobSpec(Dataset dataset,
+            Index primaryIndex, MetadataProvider metadataProvider)
             throws AlgebricksException, HyracksDataException, RemoteException, ACIDException {
         String datasetPath = dataset.getDataverseName() + File.separator + dataset.getDatasetName();
         LOGGER.info("DROP DATASETPATH: " + datasetPath);
         if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-            return RuntimeUtils.createJobSpecification();
+            return RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         }
         ARecordType itemType =
                 (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
         ARecordType metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
-        JobSpecification specPrimary = RuntimeUtils.createJobSpecification();
+        JobSpecification specPrimary = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                 metadataProvider.getSplitProviderAndConstraints(dataset);
         Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
@@ -352,7 +352,7 @@
     public static JobSpecification buildDropFilesIndexJobSpec(MetadataProvider metadataProvider, Dataset dataset)
             throws AlgebricksException {
         String indexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName());
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                 metadataProvider.splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(),
@@ -380,12 +380,12 @@
         String datasetPath = dataset.getDataverseName() + File.separator + dataset.getDatasetName();
         LOGGER.info("DROP DATASETPATH: " + datasetPath);
         if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-            return RuntimeUtils.createJobSpecification();
+            return RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         }
         ARecordType itemType =
                 (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
         ARecordType metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
-        JobSpecification specPrimary = RuntimeUtils.createJobSpecification();
+        JobSpecification specPrimary = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                 metadataProvider.getSplitProviderAndConstraints(dataset);
         Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
@@ -406,8 +406,8 @@
         return specPrimary;
     }
 
-    public static JobSpecification createDatasetJobSpec(Dataverse dataverse, String datasetName,
-            MetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
+    public static JobSpecification createDatasetJobSpec(Dataverse dataverse,
+            String datasetName, MetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
         IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
         String dataverseName = dataverse.getDataverseName();
         IDataFormat format;
@@ -430,7 +430,7 @@
             metaItemType = (ARecordType) metadataProvider.findType(dataset.getMetaItemTypeDataverseName(),
                     dataset.getMetaItemTypeName());
         }
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         IBinaryComparatorFactory[] comparatorFactories = DatasetUtil.computeKeysBinaryComparatorFactories(dataset,
                 itemType, metaItemType, format.getBinaryComparatorFactoryProvider());
         ITypeTraits[] typeTraits = DatasetUtil.computeTupleTypeTraits(dataset, itemType, metaItemType);
@@ -474,8 +474,8 @@
         return spec;
     }
 
-    public static JobSpecification compactDatasetJobSpec(Dataverse dataverse, String datasetName,
-            MetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
+    public static JobSpecification compactDatasetJobSpec(Dataverse dataverse,
+            String datasetName, MetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
         String dataverseName = dataverse.getDataverseName();
         IDataFormat format;
         try {
@@ -490,7 +490,7 @@
         ARecordType itemType =
                 (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
         ARecordType metaItemType = DatasetUtil.getMetaType(metadataProvider, dataset);
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         IBinaryComparatorFactory[] comparatorFactories = DatasetUtil.computeKeysBinaryComparatorFactories(dataset,
                 itemType, metaItemType, format.getBinaryComparatorFactoryProvider());
         ITypeTraits[] typeTraits = DatasetUtil.computeTupleTypeTraits(dataset, itemType, metaItemType);
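
Every RuntimeUtils.createJobSpecification() call in this file gains an application-context argument, obtained from the MetadataProvider the method already holds; presumably this lets each specification be built from instance configuration rather than static defaults, though the patch does not say so. The new idiom at each call site:

    // Job specifications are now created against the CC application
    // context carried by the MetadataProvider.
    JobSpecification spec =
            RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
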
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
index edaa73e..ed6de39 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
@@ -191,7 +191,7 @@
             List<ExternalFile> externalFilesSnapshot, MetadataProvider metadataProvider, boolean createIndex)
             throws AlgebricksException {
         IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
                 DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
         ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
@@ -205,10 +205,9 @@
         Index fileIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
                 dataset.getDataverseName(), dataset.getDatasetName(), fileIndexName);
         IResourceFactory localResourceMetadata = new ExternalBTreeLocalResourceMetadataFactory(
-                filesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS,
-                FilesIndexDescription.FILES_INDEX_COMP_FACTORIES, new int[] { 0 }, false, dataset.getDatasetId(),
-                mergePolicyFactory, mergePolicyFactoryProperties, dataset.getIndexOperationTrackerFactory(fileIndex),
-                dataset.getIoOperationCallbackFactory(fileIndex),
+                filesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS, FilesIndexDescription.FILES_INDEX_COMP_FACTORIES,
+                new int[] { 0 }, false, dataset.getDatasetId(), mergePolicyFactory, mergePolicyFactoryProperties,
+                dataset.getIndexOperationTrackerFactory(fileIndex), dataset.getIoOperationCallbackFactory(fileIndex),
                 storageComponentProvider.getMetadataPageManagerFactory());
         PersistentLocalResourceFactoryProvider localResourceFactoryProvider =
                 new PersistentLocalResourceFactoryProvider(localResourceMetadata, LocalResource.ExternalBTreeResource);
@@ -244,9 +243,9 @@
             List<ExternalFile> files, RecordDescriptor indexerDesc) throws HyracksDataException, AlgebricksException {
         ExternalDatasetDetails externalDatasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
         Map<String, String> configuration = externalDatasetDetails.getProperties();
-        IAdapterFactory adapterFactory =
-                AdapterFactoryProvider.getIndexingAdapterFactory(metadataProvider.getLibraryManager(),
-                        externalDatasetDetails.getAdapter(), configuration, (ARecordType) itemType, files, true, null);
+        IAdapterFactory adapterFactory = AdapterFactoryProvider.getIndexingAdapterFactory(
+                metadataProvider.getApplicationContext().getServiceContext(), externalDatasetDetails.getAdapter(),
+                configuration, (ARecordType) itemType, files, true, null);
         return new Pair<>(new ExternalScanOperatorDescriptor(jobSpec, indexerDesc, adapterFactory),
                 adapterFactory.getPartitionConstraint());
     }
@@ -311,8 +310,8 @@
                 } else {
                     // Same file name, Different file mod date -> delete and add
                     metadataFile.setPendingOp(ExternalFilePendingOp.DROP_OP);
-                    deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(),
-                            0, metadataFile.getFileName(), metadataFile.getLastModefiedTime(), metadataFile.getSize(),
+                    deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(), 0,
+                            metadataFile.getFileName(), metadataFile.getLastModefiedTime(), metadataFile.getSize(),
                             ExternalFilePendingOp.DROP_OP));
                     fileSystemFile.setPendingOp(ExternalFilePendingOp.ADD_OP);
                     fileSystemFile.setFileNumber(newFileNumber);
@@ -375,7 +374,7 @@
     public static JobSpecification buildDropFilesIndexJobSpec(MetadataProvider metadataProvider, Dataset dataset)
             throws AlgebricksException {
         String indexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName());
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                 metadataProvider.splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(),
                         dataset.getDatasetName(), indexName, true);
@@ -408,9 +407,9 @@
             } else if (file.getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
                 for (ExternalFile appendedFile : appendedFiles) {
                     if (appendedFile.getFileName().equals(file.getFileName())) {
-                        files.add(new ExternalFile(file.getDataverseName(), file.getDatasetName(),
-                                file.getFileNumber(), file.getFileName(), file.getLastModefiedTime(),
-                                appendedFile.getSize(), ExternalFilePendingOp.NO_OP));
+                        files.add(new ExternalFile(file.getDataverseName(), file.getDatasetName(), file.getFileNumber(),
+                                file.getFileName(), file.getLastModefiedTime(), appendedFile.getSize(),
+                                ExternalFilePendingOp.NO_OP));
                     }
                 }
             }
@@ -449,20 +448,19 @@
 
     public static JobSpecification buildCommitJob(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider)
             throws AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
                 DatasetUtil.getMergePolicyFactory(ds, metadataProvider.getMetadataTxnContext());
         ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
         Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint =
-                metadataProvider.getSplitProviderAndConstraints(ds,
-                        IndexingConstants.getFilesIndexName(ds.getDatasetName()));
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
+                .getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName()));
         IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
         String fileIndexName = BTreeDataflowHelperFactoryProvider.externalFileIndexName(ds);
         Index fileIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
                 ds.getDataverseName(), ds.getDatasetName(), fileIndexName);
-        IIndexDataflowHelperFactory filesIndexDataflowHelperFactory = ds.getIndexDataflowHelperFactory(
-                metadataProvider, fileIndex, null, null, mergePolicyFactory, mergePolicyFactoryProperties);
+        IIndexDataflowHelperFactory filesIndexDataflowHelperFactory = ds.getIndexDataflowHelperFactory(metadataProvider,
+                fileIndex, null, null, mergePolicyFactory, mergePolicyFactoryProperties);
         IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
                 RuntimeComponentsProvider.RUNTIME_PROVIDER, RuntimeComponentsProvider.RUNTIME_PROVIDER);
 
@@ -492,20 +490,19 @@
 
     public static JobSpecification buildAbortOp(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider)
             throws AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
                 DatasetUtil.getMergePolicyFactory(ds, metadataProvider.getMetadataTxnContext());
         ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
         Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint =
-                metadataProvider.getSplitProviderAndConstraints(ds,
-                        IndexingConstants.getFilesIndexName(ds.getDatasetName()));
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
+                .getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName()));
         IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
         String fileIndexName = BTreeDataflowHelperFactoryProvider.externalFileIndexName(ds);
         Index fileIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
                 ds.getDataverseName(), ds.getDatasetName(), fileIndexName);
-        IIndexDataflowHelperFactory filesIndexDataflowHelperFactory = ds.getIndexDataflowHelperFactory(
-                metadataProvider, fileIndex, null, null, mergePolicyFactory, mergePolicyFactoryProperties);
+        IIndexDataflowHelperFactory filesIndexDataflowHelperFactory = ds.getIndexDataflowHelperFactory(metadataProvider,
+                fileIndex, null, null, mergePolicyFactory, mergePolicyFactoryProperties);
         IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
                 RuntimeComponentsProvider.RUNTIME_PROVIDER, RuntimeComponentsProvider.RUNTIME_PROVIDER);
 
@@ -536,20 +533,19 @@
 
     public static JobSpecification buildRecoverOp(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider)
             throws AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
                 DatasetUtil.getMergePolicyFactory(ds, metadataProvider.getMetadataTxnContext());
         ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
         Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint =
-                metadataProvider.getSplitProviderAndConstraints(ds,
-                        IndexingConstants.getFilesIndexName(ds.getDatasetName()));
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
+                .getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName()));
         IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
         String fileIndexName = BTreeDataflowHelperFactoryProvider.externalFileIndexName(ds);
         Index fileIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
                 ds.getDataverseName(), ds.getDatasetName(), fileIndexName);
-        IIndexDataflowHelperFactory filesIndexDataflowHelperFactory = ds.getIndexDataflowHelperFactory(
-                metadataProvider, fileIndex, null, null, mergePolicyFactory, mergePolicyFactoryProperties);
+        IIndexDataflowHelperFactory filesIndexDataflowHelperFactory = ds.getIndexDataflowHelperFactory(metadataProvider,
+                fileIndex, null, null, mergePolicyFactory, mergePolicyFactoryProperties);
         IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
                 RuntimeComponentsProvider.RUNTIME_PROVIDER, RuntimeComponentsProvider.RUNTIME_PROVIDER);
 
@@ -579,7 +575,7 @@
 
     public static JobSpecification compactFilesIndexJobSpec(Dataset dataset, MetadataProvider metadataProvider,
             IStorageComponentProvider storageComponentProvider) throws AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
                 DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
         ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
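
Throughout these builders, the job specification is now created from the application context held by the MetadataProvider instead of a zero-argument static factory. A minimal sketch of the pattern, assuming only the signatures visible in this diff (the helper name buildEmptyJob is hypothetical):

    // Hypothetical helper showing the call pattern used by every builder above.
    public static JobSpecification buildEmptyJob(MetadataProvider metadataProvider) throws AlgebricksException {
        // Bind the spec to the provider's application context rather than a global singleton,
        // replacing the former no-argument RuntimeUtils.createJobSpecification().
        return RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    }
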
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/IndexUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/IndexUtil.java
index 83403d5..c6dc211 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/IndexUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/IndexUtil.java
@@ -18,7 +18,12 @@
  */
 package org.apache.asterix.metadata.utils;
 
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.asterix.common.config.OptimizationConfUtil;
+import org.apache.asterix.common.context.IStorageComponentProvider;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.external.indexing.ExternalFile;
@@ -26,14 +31,18 @@
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.runtime.utils.RuntimeUtils;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.job.JobSpecification;
-
-import java.util.Collections;
-import java.util.List;
+import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
+import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
 
 public class IndexUtil {
 
@@ -90,8 +99,7 @@
     }
 
     public static JobSpecification buildDropIndexJobSpec(Index index, MetadataProvider metadataProvider,
-            Dataset dataset)
-            throws AlgebricksException {
+            Dataset dataset) throws AlgebricksException {
         ARecordType recordType =
                 (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
         ARecordType metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
@@ -109,8 +117,8 @@
         return secondaryIndexHelper.buildDropJobSpec();
     }
 
-    public static JobSpecification buildSecondaryIndexCreationJobSpec(Dataset dataset, Index index,
-            ARecordType recType, ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType,
+    public static JobSpecification buildSecondaryIndexCreationJobSpec(Dataset dataset, Index index, ARecordType recType,
+            ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType,
             MetadataProvider metadataProvider) throws AlgebricksException {
         SecondaryIndexOperationsHelper secondaryIndexHelper =
                 SecondaryIndexOperationsHelper.createIndexOperationsHelper(dataset, index, metadataProvider,
@@ -137,6 +145,31 @@
         return secondaryIndexHelper.buildLoadingJobSpec();
     }
 
+    public static JobSpecification buildDropSecondaryIndexJobSpec(Index index, MetadataProvider metadataProvider,
+            Dataset dataset) throws AlgebricksException {
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
+        IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
+                metadataProvider.getSplitProviderAndConstraints(dataset, index.getIndexName());
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
+                DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
+        ARecordType recordType =
+                (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
+        ARecordType metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
+        IIndexDataflowHelperFactory dataflowHelperFactory = dataset.getIndexDataflowHelperFactory(metadataProvider,
+                index, recordType, metaType, compactionInfo.first, compactionInfo.second);
+        // The index drop operation should be persistent regardless of whether the dataset is temporary or permanent.
+        IndexDropOperatorDescriptor btreeDrop =
+                new IndexDropOperatorDescriptor(spec, storageComponentProvider.getStorageManager(),
+                        storageComponentProvider.getIndexLifecycleManagerProvider(), splitsAndConstraint.first,
+                        dataflowHelperFactory, storageComponentProvider.getMetadataPageManagerFactory());
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, btreeDrop,
+                splitsAndConstraint.second);
+        spec.addRoot(btreeDrop);
+
+        return spec;
+    }
+
     public static JobSpecification buildSecondaryIndexCompactJobSpec(Dataset dataset, Index index, ARecordType recType,
             ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType,
             MetadataProvider metadataProvider) throws AlgebricksException {
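
The new buildDropSecondaryIndexJobSpec follows the same shape as the other builders in this file: resolve the split provider and merge policy from metadata, build the index dataflow helper, and add a single IndexDropOperatorDescriptor as the job's root. A hedged sketch of a call site, assuming the caller has already resolved the metadata entities:

    // Hypothetical caller; index, metadataProvider, and dataset are resolved elsewhere.
    JobSpecification dropSpec = IndexUtil.buildDropSecondaryIndexJobSpec(index, metadataProvider, dataset);
    // dropSpec now has the IndexDropOperatorDescriptor as its only root, with its
    // partition constraint taken from the dataset's file splits; it is ready to run.
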
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryBTreeOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryBTreeOperationsHelper.java
index 59068c8..faea7fd 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryBTreeOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryBTreeOperationsHelper.java
@@ -22,7 +22,6 @@
 
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.GlobalConfig;
-import org.apache.asterix.common.config.IPropertiesProvider;
 import org.apache.asterix.common.context.IStorageComponentProvider;
 import org.apache.asterix.external.indexing.IndexingConstants;
 import org.apache.asterix.external.operators.ExternalScanOperatorDescriptor;
@@ -62,25 +61,23 @@
 import org.apache.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
-import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
 import org.apache.hyracks.storage.common.file.ILocalResourceFactoryProvider;
 import org.apache.hyracks.storage.common.file.LocalResource;
 
 public class SecondaryBTreeOperationsHelper extends SecondaryTreeIndexOperationsHelper {
 
     protected SecondaryBTreeOperationsHelper(Dataset dataset, Index index, PhysicalOptimizationConfig physOptConf,
-            IPropertiesProvider propertiesProvider, MetadataProvider metadataProvider, ARecordType recType,
-            ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType) {
-        super(dataset, index, physOptConf, propertiesProvider, metadataProvider, recType, metaType, enforcedType,
-                enforcedMetaType);
+            MetadataProvider metadataProvider, ARecordType recType, ARecordType metaType, ARecordType enforcedType,
+            ARecordType enforcedMetaType) {
+        super(dataset, index, physOptConf, metadataProvider, recType, metaType, enforcedType, enforcedMetaType);
     }
 
     @Override
     public JobSpecification buildCreationJobSpec() throws AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         ILocalResourceFactoryProvider localResourceFactoryProvider;
-        IIndexDataflowHelperFactory indexDataflowHelperFactory = dataset.getIndexDataflowHelperFactory(
-                metadataProvider, index, itemType, metaType, mergePolicyFactory, mergePolicyFactoryProperties);
+        IIndexDataflowHelperFactory indexDataflowHelperFactory = dataset.getIndexDataflowHelperFactory(metadataProvider,
+                index, itemType, metaType, mergePolicyFactory, mergePolicyFactoryProperties);
         IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
         if (dataset.getDatasetType() == DatasetType.INTERNAL) {
             //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
@@ -109,9 +106,9 @@
                 new TreeIndexCreateOperatorDescriptor(spec, storageComponentProvider.getStorageManager(),
                         storageComponentProvider.getIndexLifecycleManagerProvider(), secondaryFileSplitProvider,
                         secondaryTypeTraits, secondaryComparatorFactories, secondaryBloomFilterKeyFields,
-                        indexDataflowHelperFactory, localResourceFactoryProvider,
-                        dataset.getModificationCallbackFactory(storageComponentProvider, index, null,
-                                IndexOperation.CREATE, null),
+                        indexDataflowHelperFactory,
+                        localResourceFactoryProvider, dataset.getModificationCallbackFactory(storageComponentProvider,
+                                index, null, IndexOperation.CREATE, null),
                         storageComponentProvider.getMetadataPageManagerFactory());
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryIndexCreateOp,
                 secondaryPartitionConstraint);
@@ -122,7 +119,7 @@
 
     @Override
     public JobSpecification buildLoadingJobSpec() throws AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         boolean isEnforcingKeyTypes = index.isEnforcingKeyFileds();
         int[] fieldPermutation = createFieldPermutationForBulkLoadOp(index.getKeyFieldNames().size());
         IIndexDataflowHelperFactory dataflowHelperFactory = dataset.getIndexDataflowHelperFactory(metadataProvider,
@@ -269,8 +266,7 @@
                 sourceColumn = recordColumn + 1;
             }
             secondaryFieldAccessEvalFactories[i] = metadataProvider.getFormat().getFieldAccessEvaluatorFactory(
-                    isEnforcingKeyTypes ? enforcedItemType : sourceType, index.getKeyFieldNames().get(i),
-                    sourceColumn);
+                    isEnforcingKeyTypes ? enforcedItemType : sourceType, index.getKeyFieldNames().get(i), sourceColumn);
             Pair<IAType, Boolean> keyTypePair = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(i),
                     index.getKeyFieldNames().get(i), sourceType);
             IAType keyType = keyTypePair.first;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java
index efb3315..e5b5b9f 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java
@@ -25,7 +25,6 @@
 
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
-import org.apache.asterix.common.config.IPropertiesProvider;
 import org.apache.asterix.common.context.ITransactionSubsystemProvider;
 import org.apache.asterix.common.context.TransactionSubsystemProvider;
 import org.apache.asterix.common.exceptions.AsterixException;
@@ -53,7 +52,6 @@
 import org.apache.asterix.runtime.evaluators.functions.IsUnknownDescriptor;
 import org.apache.asterix.runtime.evaluators.functions.NotDescriptor;
 import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.runtime.utils.RuntimeComponentsProvider;
 import org.apache.asterix.transaction.management.opcallbacks.PrimaryIndexInstantSearchOperationCallbackFactory;
 import org.apache.asterix.transaction.management.service.transaction.JobIdFactory;
@@ -118,7 +116,6 @@
     protected int[] secondaryBloomFilterKeyFields;
     protected RecordDescriptor secondaryRecDesc;
     protected IScalarEvaluatorFactory[] secondaryFieldAccessEvalFactories;
-    protected IPropertiesProvider propertiesProvider;
     protected ILSMMergePolicyFactory mergePolicyFactory;
     protected Map<String, String> mergePolicyFactoryProperties;
     protected RecordDescriptor enforcedRecDesc;
@@ -135,12 +132,11 @@
 
     // Prevent public construction. Should be created via createIndexOperationsHelper().
     protected SecondaryIndexOperationsHelper(Dataset dataset, Index index, PhysicalOptimizationConfig physOptConf,
-            IPropertiesProvider propertiesProvider, MetadataProvider metadataProvider, ARecordType recType,
+            MetadataProvider metadataProvider, ARecordType recType,
             ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType) {
         this.dataset = dataset;
         this.index = index;
         this.physOptConf = physOptConf;
-        this.propertiesProvider = propertiesProvider;
         this.metadataProvider = metadataProvider;
         this.itemType = recType;
         this.metaType = metaType;
@@ -151,17 +147,16 @@
     public static SecondaryIndexOperationsHelper createIndexOperationsHelper(Dataset dataset, Index index,
             MetadataProvider metadataProvider, PhysicalOptimizationConfig physOptConf, ARecordType recType,
             ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType) throws AlgebricksException {
-        IPropertiesProvider asterixPropertiesProvider = AppContextInfo.INSTANCE;
         SecondaryIndexOperationsHelper indexOperationsHelper;
         switch (index.getIndexType()) {
             case BTREE:
                 indexOperationsHelper =
-                        new SecondaryBTreeOperationsHelper(dataset, index, physOptConf, asterixPropertiesProvider,
+                        new SecondaryBTreeOperationsHelper(dataset, index, physOptConf,
                                 metadataProvider, recType, metaType, enforcedType, enforcedMetaType);
                 break;
             case RTREE:
                 indexOperationsHelper =
-                        new SecondaryRTreeOperationsHelper(dataset, index, physOptConf, asterixPropertiesProvider,
+                        new SecondaryRTreeOperationsHelper(dataset, index, physOptConf,
                                 metadataProvider, recType, metaType, enforcedType, enforcedMetaType);
                 break;
             case SINGLE_PARTITION_WORD_INVIX:
@@ -169,8 +164,7 @@
             case LENGTH_PARTITIONED_WORD_INVIX:
             case LENGTH_PARTITIONED_NGRAM_INVIX:
                 indexOperationsHelper = new SecondaryInvertedIndexOperationsHelper(dataset, index, physOptConf,
-                        asterixPropertiesProvider, metadataProvider, recType, metaType, enforcedType,
-                        enforcedMetaType);
+                        metadataProvider, recType, metaType, enforcedType, enforcedMetaType);
                 break;
             default:
                 throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_INDEX_TYPE, index.getIndexType());
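
With IPropertiesProvider gone from the constructors, createIndexOperationsHelper threads only the MetadataProvider through to each subclass, which reaches configuration via that provider instead of the AppContextInfo singleton. A sketch of the factory in use, assuming all arguments were already resolved from metadata:

    // Hypothetical call site for the slimmed-down factory.
    SecondaryIndexOperationsHelper helper = SecondaryIndexOperationsHelper.createIndexOperationsHelper(
            dataset, index, metadataProvider, physOptConf, recType, metaType, enforcedType, enforcedMetaType);
    JobSpecification createSpec = helper.buildCreationJobSpec();
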
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
index 5ab36c1..985f8cc 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
@@ -18,8 +18,9 @@
  */
 package org.apache.asterix.metadata.utils;
 
+import java.util.Map;
+
 import org.apache.asterix.common.config.DatasetConfig.IndexType;
-import org.apache.asterix.common.config.IPropertiesProvider;
 import org.apache.asterix.common.context.IStorageComponentProvider;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.exceptions.CompilationException;
@@ -73,8 +74,6 @@
 import org.apache.hyracks.storage.common.file.ILocalResourceFactoryProvider;
 import org.apache.hyracks.storage.common.file.LocalResource;
 
-import java.util.Map;
-
 public class SecondaryInvertedIndexOperationsHelper extends SecondaryIndexOperationsHelper {
 
     private IAType secondaryKeyType;
@@ -92,11 +91,9 @@
     private int[] secondaryFilterFieldsForNonBulkLoadOps;
 
     protected SecondaryInvertedIndexOperationsHelper(Dataset dataset, Index index,
-            PhysicalOptimizationConfig physOptConf, IPropertiesProvider propertiesProvider,
-            MetadataProvider metadataProvider, ARecordType recType, ARecordType metaType, ARecordType enforcedType,
-            ARecordType enforcedMetaType) {
-        super(dataset, index, physOptConf, propertiesProvider, metadataProvider, recType, metaType, enforcedType,
-                enforcedMetaType);
+            PhysicalOptimizationConfig physOptConf, MetadataProvider metadataProvider, ARecordType recType,
+            ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType) {
+        super(dataset, index, physOptConf, metadataProvider, recType, metaType, enforcedType, enforcedMetaType);
     }
 
     @Override
@@ -107,9 +104,8 @@
         boolean isEnforcingKeyTypes = index.isEnforcingKeyFileds();
         // Sanity checks.
         if (numPrimaryKeys > 1) {
-            throw new CompilationException(
-                    ErrorCode.COMPILATION_ILLEGAL_INDEX_FOR_DATASET_WITH_COMPOSITE_PRIMARY_INDEX, indexType,
-                    RecordUtil.toFullyQualifiedName(dataset.getDataverseName(), dataset.getDatasetName()));
+            throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_FOR_DATASET_WITH_COMPOSITE_PRIMARY_INDEX,
+                    indexType, RecordUtil.toFullyQualifiedName(dataset.getDataverseName(), dataset.getDatasetName()));
         }
         if (numSecondaryKeys > 1) {
             throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_NUM_OF_FIELD, numSecondaryKeys,
@@ -126,16 +122,14 @@
         secondaryFieldAccessEvalFactories = new IScalarEvaluatorFactory[numSecondaryKeys + numFilterFields];
         ISerializerDeserializer[] secondaryRecFields =
                 new ISerializerDeserializer[numPrimaryKeys + numSecondaryKeys + numFilterFields];
-        ISerializerDeserializer[] enforcedRecFields =
-                new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
+        ISerializerDeserializer[] enforcedRecFields = new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
         secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys];
         ITypeTraits[] enforcedTypeTraits = new ITypeTraits[1 + numPrimaryKeys];
         ISerializerDeserializerProvider serdeProvider = FormatUtils.getDefaultFormat().getSerdeProvider();
         ITypeTraitProvider typeTraitProvider = FormatUtils.getDefaultFormat().getTypeTraitProvider();
         if (numSecondaryKeys > 0) {
             secondaryFieldAccessEvalFactories[0] = FormatUtils.getDefaultFormat().getFieldAccessEvaluatorFactory(
-                    isEnforcingKeyTypes ? enforcedItemType : itemType, index.getKeyFieldNames().get(0),
-                    numPrimaryKeys);
+                    isEnforcingKeyTypes ? enforcedItemType : itemType, index.getKeyFieldNames().get(0), numPrimaryKeys);
             Pair<IAType, Boolean> keyTypePair = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0),
                     index.getKeyFieldNames().get(0), itemType);
             secondaryKeyType = keyTypePair.first;
@@ -226,7 +220,7 @@
 
     @Override
     public JobSpecification buildCreationJobSpec() throws AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
         //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
         IResourceFactory localResourceMetadata = new LSMInvertedIndexLocalResourceMetadataFactory(invListsTypeTraits,
@@ -234,8 +228,7 @@
                 dataset.getDatasetId(), mergePolicyFactory, mergePolicyFactoryProperties, filterTypeTraits,
                 filterCmpFactories, invertedIndexFields, secondaryFilterFields, secondaryFilterFieldsForNonBulkLoadOps,
                 invertedIndexFieldsForNonBulkLoadOps, dataset.getIndexOperationTrackerFactory(index),
-                dataset.getIoOperationCallbackFactory(index),
-                storageComponentProvider.getMetadataPageManagerFactory());
+                dataset.getIoOperationCallbackFactory(index), storageComponentProvider.getMetadataPageManagerFactory());
         ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
                 localResourceMetadata, LocalResource.LSMInvertedIndexResource);
 
@@ -244,9 +237,9 @@
                 new LSMInvertedIndexCreateOperatorDescriptor(spec, storageComponentProvider.getStorageManager(),
                         secondaryFileSplitProvider, storageComponentProvider.getIndexLifecycleManagerProvider(),
                         tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits, primaryComparatorFactories,
-                        tokenizerFactory, dataflowHelperFactory, localResourceFactoryProvider,
-                        dataset.getModificationCallbackFactory(storageComponentProvider, index, null,
-                                IndexOperation.CREATE, null),
+                        tokenizerFactory, dataflowHelperFactory,
+                        localResourceFactoryProvider, dataset.getModificationCallbackFactory(storageComponentProvider,
+                                index, null, IndexOperation.CREATE, null),
                         storageComponentProvider.getMetadataPageManagerFactory());
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, invIndexCreateOp,
                 secondaryPartitionConstraint);
@@ -257,7 +250,7 @@
 
     @Override
     public JobSpecification buildLoadingJobSpec() throws AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
 
         // Create dummy key provider for feeding the primary index scan.
         AbstractOperatorDescriptor keyProviderOp = createDummyKeyProviderOp(spec);
@@ -317,8 +310,8 @@
         for (int i = 0; i < primaryKeyFields.length; i++) {
             primaryKeyFields[i] = numSecondaryKeys + i;
         }
-        BinaryTokenizerOperatorDescriptor tokenizerOp = new BinaryTokenizerOperatorDescriptor(spec,
-                tokenKeyPairRecDesc, tokenizerFactory, docField, primaryKeyFields, isPartitioned, false);
+        BinaryTokenizerOperatorDescriptor tokenizerOp = new BinaryTokenizerOperatorDescriptor(spec, tokenKeyPairRecDesc,
+                tokenizerFactory, docField, primaryKeyFields, isPartitioned, false);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, tokenizerOp,
                 primaryPartitionConstraint);
         return tokenizerOp;
@@ -364,7 +357,7 @@
 
     @Override
     public JobSpecification buildCompactJobSpec() throws AsterixException, AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         IIndexDataflowHelperFactory dataflowHelperFactory = createDataflowHelperFactory();
         IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
         LSMInvertedIndexCompactOperator compactOp =
@@ -385,7 +378,7 @@
 
     @Override
     public JobSpecification buildDropJobSpec() throws AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                 metadataProvider.getSplitProviderAndConstraints(dataset, index.getIndexName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java
index 93a88e2..352bc6a 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java
@@ -22,7 +22,6 @@
 
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.GlobalConfig;
-import org.apache.asterix.common.config.IPropertiesProvider;
 import org.apache.asterix.common.context.IStorageComponentProvider;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.transactions.IResourceFactory;
@@ -83,15 +82,15 @@
     protected RecordDescriptor secondaryRecDescForPointMBR = null;
 
     protected SecondaryRTreeOperationsHelper(Dataset dataset, Index index, PhysicalOptimizationConfig physOptConf,
-            IPropertiesProvider propertiesProvider, MetadataProvider metadataProvider, ARecordType recType,
+            MetadataProvider metadataProvider, ARecordType recType,
             ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType) {
-        super(dataset, index, physOptConf, propertiesProvider, metadataProvider, recType, metaType, enforcedType,
+        super(dataset, index, physOptConf, metadataProvider, recType, metaType, enforcedType,
                 enforcedMetaType);
     }
 
     @Override
     public JobSpecification buildCreationJobSpec() throws AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         IIndexDataflowHelperFactory indexDataflowHelperFactory = dataset.getIndexDataflowHelperFactory(
                 metadataProvider, index, itemType, metaType, mergePolicyFactory, mergePolicyFactoryProperties);
         IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
@@ -267,7 +266,7 @@
          * 3) Bulk-loading in RTree takes 4 doubles by reading 2 doubles twice and then
          * does the same work as the non-point MBR cases.
          ***************************************************/
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         int[] fieldPermutation = createFieldPermutationForBulkLoadOp(numNestedSecondaryKeyFields);
         int numNestedSecondaryKeFieldsConsideringPointMBR =
                 isPointMBR ? numNestedSecondaryKeyFields / 2 : numNestedSecondaryKeyFields;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryTreeIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryTreeIndexOperationsHelper.java
index 2b025c9..c7af7ba 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryTreeIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryTreeIndexOperationsHelper.java
@@ -19,7 +19,8 @@
 
 package org.apache.asterix.metadata.utils;
 
-import org.apache.asterix.common.config.IPropertiesProvider;
+import java.util.Map;
+
 import org.apache.asterix.common.context.IStorageComponentProvider;
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
@@ -40,25 +41,17 @@
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
 import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
 
-import java.util.Map;
-
 public abstract class SecondaryTreeIndexOperationsHelper extends SecondaryIndexOperationsHelper {
 
-    protected SecondaryTreeIndexOperationsHelper(Dataset dataset,
-            Index index,
-            PhysicalOptimizationConfig physOptConf,
-            IPropertiesProvider propertiesProvider,
-            MetadataProvider metadataProvider,
-            ARecordType recType, ARecordType metaType,
-            ARecordType enforcedType,
+    protected SecondaryTreeIndexOperationsHelper(Dataset dataset, Index index, PhysicalOptimizationConfig physOptConf,
+            MetadataProvider metadataProvider, ARecordType recType, ARecordType metaType, ARecordType enforcedType,
             ARecordType enforcedMetaType) {
-        super(dataset, index, physOptConf, propertiesProvider, metadataProvider, recType, metaType, enforcedType,
-                enforcedMetaType);
+        super(dataset, index, physOptConf, metadataProvider, recType, metaType, enforcedType, enforcedMetaType);
     }
 
     @Override
     public JobSpecification buildDropJobSpec() throws AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                 metadataProvider.getSplitProviderAndConstraints(dataset, index.getIndexName());
@@ -82,9 +75,9 @@
 
     @Override
     public JobSpecification buildCompactJobSpec() throws AlgebricksException {
-        JobSpecification spec = RuntimeUtils.createJobSpecification();
-        IIndexDataflowHelperFactory indexDataflowHelperFactory = dataset.getIndexDataflowHelperFactory(
-                metadataProvider, index, itemType, metaType, mergePolicyFactory, mergePolicyFactoryProperties);
+        JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
+        IIndexDataflowHelperFactory indexDataflowHelperFactory = dataset.getIndexDataflowHelperFactory(metadataProvider,
+                index, itemType, metaType, mergePolicyFactory, mergePolicyFactoryProperties);
         LSMTreeIndexCompactOperatorDescriptor compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
                 metadataProvider.getStorageComponentProvider().getStorageManager(),
                 metadataProvider.getStorageComponentProvider().getIndexLifecycleManagerProvider(),
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
index 1841e5d..5bedffa 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
@@ -19,12 +19,13 @@
 
 package org.apache.asterix.metadata.valueextractors;
 
-import java.io.IOException;
+import java.rmi.RemoteException;
 
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.api.IMetadataEntityTupleTranslator;
 import org.apache.asterix.metadata.api.IValueExtractor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 /**
@@ -38,7 +39,8 @@
     }
 
     @Override
-    public T getValue(JobId jobId, ITupleReference tuple) throws MetadataException, IOException {
+    public T getValue(JobId jobId, ITupleReference tuple)
+            throws MetadataException, HyracksDataException, RemoteException {
         return tupleReaderWriter.getMetadataEntityFromTuple(tuple);
     }
 }
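
The extractor contract is narrowed from the broad java.io.IOException to the checked exceptions that can actually propagate here: HyracksDataException and RemoteException. A sketch of the presumed shape of the interface after this change (the interface body is not shown in this patch, so this is an assumption):

    import java.rmi.RemoteException;
    import org.apache.asterix.common.transactions.JobId;
    import org.apache.asterix.metadata.MetadataException;
    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;

    // Presumed IValueExtractor contract after this change; illustrative only.
    public interface IValueExtractor<T> {
        T getValue(JobId jobId, ITupleReference tuple)
                throws MetadataException, HyracksDataException, RemoteException;
    }
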
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
index 9f63ebf..32bed0d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
@@ -27,6 +27,7 @@
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.api.IValueExtractor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.util.string.UTF8StringReader;
 
@@ -48,19 +49,23 @@
     private final UTF8StringReader reader = new UTF8StringReader();
 
     @Override
-    public String getValue(JobId jobId, ITupleReference tuple) throws MetadataException, IOException {
+    public String getValue(JobId jobId, ITupleReference tuple) throws MetadataException, HyracksDataException {
         byte[] serRecord = tuple.getFieldData(2);
         int recordStartOffset = tuple.getFieldStart(2);
         int recordLength = tuple.getFieldLength(2);
         ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
         DataInput in = new DataInputStream(stream);
-        String nestedType = reader.readUTF(in);
-        if (nestedType.equals(datatypeName)) {
-            recordStartOffset = tuple.getFieldStart(1);
-            recordLength = tuple.getFieldLength(1);
-            stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
-            in = new DataInputStream(stream);
-            return reader.readUTF(in);
+        try {
+            String nestedType = reader.readUTF(in);
+            if (nestedType.equals(datatypeName)) {
+                recordStartOffset = tuple.getFieldStart(1);
+                recordLength = tuple.getFieldLength(1);
+                stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
+                in = new DataInputStream(stream);
+                return reader.readUTF(in);
+            }
+        } catch (IOException e) {
+            throw HyracksDataException.create(e);
         }
         return null;
     }
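
The UTF reads are now wrapped so the checked IOException surfaces as a HyracksDataException, matching the narrowed getValue signature above. The wrapping idiom in isolation, as a minimal sketch (the method name readUtf is illustrative):

    import java.io.DataInput;
    import java.io.IOException;
    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.util.string.UTF8StringReader;

    // Illustrative idiom: convert a checked IOException into HyracksDataException.
    private static String readUtf(UTF8StringReader reader, DataInput in) throws HyracksDataException {
        try {
            return reader.readUTF(in);
        } catch (IOException e) {
            // HyracksDataException.create(e) preserves the original cause, as in the diff above.
            throw HyracksDataException.create(e);
        }
    }
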
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
index 4c0f48f..fcf69d5 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
@@ -19,13 +19,13 @@
 
 package org.apache.asterix.metadata.valueextractors;
 
-import java.io.IOException;
 import java.nio.ByteBuffer;
 
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.api.IValueExtractor;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
 import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
@@ -48,7 +48,7 @@
     }
 
     @Override
-    public ITupleReference getValue(JobId jobId, ITupleReference tuple) throws MetadataException, IOException {
+    public ITupleReference getValue(JobId jobId, ITupleReference tuple) throws MetadataException, HyracksDataException {
         int numBytes = tupleWriter.bytesRequired(tuple);
         tupleBytes = new byte[numBytes];
         tupleWriter.writeTuple(tuple, tupleBytes, 0);
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
index dd40b04..5e58802 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
@@ -44,7 +44,6 @@
 
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.asterix.common.cluster.ClusterPartition;
-import org.apache.asterix.common.config.IPropertiesProvider;
 import org.apache.asterix.common.config.ReplicationProperties;
 import org.apache.asterix.common.context.IndexInfo;
 import org.apache.asterix.common.exceptions.ACIDException;
@@ -117,8 +116,8 @@
         this.replicationProperties = replicationProperties;
         this.appContextProvider = asterixAppRuntimeContextProvider;
         this.dsLifecycleManager = asterixAppRuntimeContextProvider.getDatasetLifecycleManager();
-        this.localResourceRep = (PersistentLocalResourceRepository) asterixAppRuntimeContextProvider
-                .getLocalResourceRepository();
+        this.localResourceRep =
+                (PersistentLocalResourceRepository) asterixAppRuntimeContextProvider.getLocalResourceRepository();
         lsmComponentRemoteLSN2LocalLSNMappingTaskQ = new LinkedBlockingQueue<>();
         pendingNotificationRemoteLogsQ = new LinkedBlockingQueue<>();
         lsmComponentId2PropertiesMap = new ConcurrentHashMap<>();
@@ -148,8 +147,8 @@
         try {
             serverSocketChannel = ServerSocketChannel.open();
             serverSocketChannel.configureBlocking(true);
-            InetSocketAddress replicationChannelAddress = new InetSocketAddress(InetAddress.getByName(nodeIP),
-                    dataPort);
+            InetSocketAddress replicationChannelAddress =
+                    new InetSocketAddress(InetAddress.getByName(nodeIP), dataPort);
             serverSocketChannel.socket().bind(replicationChannelAddress);
             lsmComponentLSNMappingService.start();
             replicationNotifier.start();
@@ -176,8 +175,9 @@
         if (remainingFile == 0) {
             if (lsmCompProp.getOpType() == LSMOperationType.FLUSH && lsmCompProp.getReplicaLSN() != null
                     && replicaUniqueLSN2RemoteMapping.containsKey(lsmCompProp.getNodeUniqueLSN())) {
-                int remainingIndexes = replicaUniqueLSN2RemoteMapping
-                        .get(lsmCompProp.getNodeUniqueLSN()).numOfFlushedIndexes.decrementAndGet();
+                int remainingIndexes =
+                        replicaUniqueLSN2RemoteMapping.get(lsmCompProp.getNodeUniqueLSN()).numOfFlushedIndexes
+                                .decrementAndGet();
                 if (remainingIndexes == 0) {
                     /**
                      * Note: there is a chance that this will never be removed because some
@@ -223,8 +223,8 @@
         public void run() {
             Thread.currentThread().setName("Replication Thread");
             try {
-                ReplicationRequestType replicationFunction = ReplicationProtocol.getRequestType(socketChannel,
-                        inBuffer);
+                ReplicationRequestType replicationFunction =
+                        ReplicationProtocol.getRequestType(socketChannel, inBuffer);
                 while (replicationFunction != ReplicationRequestType.GOODBYE) {
                     switch (replicationFunction) {
                         case REPLICATE_LOG:
@@ -288,8 +288,8 @@
             Set<Integer> datasetsToForceFlush = new HashSet<>();
             for (IndexInfo iInfo : openIndexesInfo) {
                 if (requestedIndexesToBeFlushed.contains(iInfo.getResourceId())) {
-                    AbstractLSMIOOperationCallback ioCallback = (AbstractLSMIOOperationCallback) iInfo.getIndex()
-                            .getIOOperationCallback();
+                    AbstractLSMIOOperationCallback ioCallback =
+                            (AbstractLSMIOOperationCallback) iInfo.getIndex().getIOOperationCallback();
                     //if an index has a pending flush, then the request to flush it will succeed.
                     if (ioCallback.hasPendingFlush()) {
                         //remove index to indicate that it will be flushed
@@ -380,8 +380,8 @@
             List<String> filesList;
             Set<Integer> partitionIds = request.getPartitionIds();
             Set<String> requesterExistingFiles = request.getExistingFiles();
-            Map<Integer, ClusterPartition> clusterPartitions = ((IPropertiesProvider) appContextProvider
-                    .getAppContext()).getMetadataProperties().getClusterPartitions();
+            Map<Integer, ClusterPartition> clusterPartitions =
+                    appContextProvider.getAppContext().getMetadataProperties().getClusterPartitions();
 
             final IReplicationStrategy repStrategy = replicationProperties.getReplicationStrategy();
             // Flush replicated datasets to generate the latest LSM components
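
Cluster partition metadata is now reached directly through the node's application context; the cast to the removed IPropertiesProvider interface is no longer needed. Side by side, as a sketch:

    // Before: a cast to the now-removed IPropertiesProvider was required.
    // Map<Integer, ClusterPartition> parts = ((IPropertiesProvider) appContextProvider
    //         .getAppContext()).getMetadataProperties().getClusterPartitions();
    // After: the application context exposes the metadata properties directly.
    Map<Integer, ClusterPartition> parts =
            appContextProvider.getAppContext().getMetadataProperties().getClusterPartitions();
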
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
index 447021c..da45e42 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
@@ -53,7 +53,6 @@
 import java.util.stream.Collectors;
 
 import org.apache.asterix.common.cluster.ClusterPartition;
-import org.apache.asterix.common.config.IPropertiesProvider;
 import org.apache.asterix.common.config.ReplicationProperties;
 import org.apache.asterix.common.dataflow.LSMIndexUtil;
 import org.apache.asterix.common.replication.IReplicaResourcesManager;
@@ -149,8 +148,8 @@
         this.replicaResourcesManager = (ReplicaResourcesManager) remoteResoucesManager;
         this.asterixAppRuntimeContextProvider = asterixAppRuntimeContextProvider;
         this.logManager = logManager;
-        localResourceRepo = (PersistentLocalResourceRepository) asterixAppRuntimeContextProvider
-                .getLocalResourceRepository();
+        localResourceRepo =
+                (PersistentLocalResourceRepository) asterixAppRuntimeContextProvider.getLocalResourceRepository();
         this.hostIPAddressFirstOctet = replicationProperties.getReplicaIPAddress(nodeId).substring(0, 3);
         replicas = new HashMap<>();
         replicationJobsQ = new LinkedBlockingQueue<>();
@@ -170,8 +169,8 @@
         replicationListenerThreads = Executors.newCachedThreadPool();
         replicationJobsProcessor = new ReplicationJobsProccessor();
 
-        Map<String, ClusterPartition[]> nodePartitions = ((IPropertiesProvider) asterixAppRuntimeContextProvider
-                .getAppContext()).getMetadataProperties().getNodePartitions();
+        Map<String, ClusterPartition[]> nodePartitions =
+                asterixAppRuntimeContextProvider.getAppContext().getMetadataProperties().getNodePartitions();
         replica2PartitionsMap = new HashMap<>(replicaNodes.size());
         for (Replica replica : replicaNodes) {
             replicas.put(replica.getId(), replica);
@@ -347,8 +346,7 @@
                             requestBuffer = ReplicationProtocol.writeFileReplicationRequest(requestBuffer,
                                     asterixFileProperties, ReplicationRequestType.REPLICATE_FILE);
 
-                            Iterator<Map.Entry<String, SocketChannel>> iterator =
-                                    replicasSockets.entrySet().iterator();
+                            Iterator<Map.Entry<String, SocketChannel>> iterator = replicasSockets.entrySet().iterator();
                             while (iterator.hasNext()) {
                                 Map.Entry<String, SocketChannel> entry = iterator.next();
                                 //if the remote replica is not interested in this partition, skip it.
@@ -798,8 +796,7 @@
 
             //if ACKs were received from all remote replicas, notify pending jobs, if any
 
-            if (jobCommitAcks.get(jobId).size() == replicationFactor
-                    && replicationJobsPendingAcks.containsKey(jobId)) {
+            if (jobCommitAcks.get(jobId).size() == replicationFactor && replicationJobsPendingAcks.containsKey(jobId)) {
                 ILogRecord pendingLog = replicationJobsPendingAcks.get(jobId);
                 synchronized (pendingLog) {
                     pendingLog.notifyAll();
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java
index 7441ec7..7c7a050 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java
@@ -29,11 +29,10 @@
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.cluster.ClusterPartition;
 import org.apache.asterix.common.config.ClusterProperties;
-import org.apache.asterix.common.config.IPropertiesProvider;
 import org.apache.asterix.common.config.ReplicationProperties;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.replication.IRemoteRecoveryManager;
@@ -48,11 +47,11 @@
 
     private final IReplicationManager replicationManager;
     private static final Logger LOGGER = Logger.getLogger(RemoteRecoveryManager.class.getName());
-    private final IAppRuntimeContext runtimeContext;
+    private final INcApplicationContext runtimeContext;
     private final ReplicationProperties replicationProperties;
     private Map<String, Set<String>> failbackRecoveryReplicas;
 
-    public RemoteRecoveryManager(IReplicationManager replicationManager, IAppRuntimeContext runtimeContext,
+    public RemoteRecoveryManager(IReplicationManager replicationManager, INcApplicationContext runtimeContext,
             ReplicationProperties replicationProperties) {
         this.replicationManager = replicationManager;
         this.runtimeContext = runtimeContext;
@@ -156,8 +155,8 @@
         replayReplicaPartitionLogs(partitionsToTakeover, false);
 
         //mark these partitions as active in this node
-        PersistentLocalResourceRepository resourceRepository = (PersistentLocalResourceRepository) runtimeContext
-                .getLocalResourceRepository();
+        PersistentLocalResourceRepository resourceRepository =
+                (PersistentLocalResourceRepository) runtimeContext.getLocalResourceRepository();
         for (Integer patitionId : partitions) {
             resourceRepository.addActivePartition(patitionId);
         }
@@ -166,11 +165,10 @@
     @Override
     public void startFailbackProcess() {
         int maxRecoveryAttempts = replicationProperties.getMaxRemoteRecoveryAttempts();
-        PersistentLocalResourceRepository resourceRepository = (PersistentLocalResourceRepository) runtimeContext
-                .getLocalResourceRepository();
+        PersistentLocalResourceRepository resourceRepository =
+                (PersistentLocalResourceRepository) runtimeContext.getLocalResourceRepository();
         IDatasetLifecycleManager datasetLifeCycleManager = runtimeContext.getDatasetLifecycleManager();
-        Map<String, ClusterPartition[]> nodePartitions = runtimeContext.getMetadataProperties()
-                .getNodePartitions();
+        Map<String, ClusterPartition[]> nodePartitions = runtimeContext.getMetadataProperties().getNodePartitions();
 
         while (true) {
             //start recovery steps
@@ -225,10 +223,9 @@
     @Override
     public void completeFailbackProcess() throws IOException, InterruptedException {
         ILogManager logManager = runtimeContext.getTransactionSubsystem().getLogManager();
-        ReplicaResourcesManager replicaResourcesManager = (ReplicaResourcesManager) runtimeContext
-                .getReplicaResourcesManager();
-        Map<String, ClusterPartition[]> nodePartitions = ((IPropertiesProvider) runtimeContext).getMetadataProperties()
-                .getNodePartitions();
+        ReplicaResourcesManager replicaResourcesManager =
+                (ReplicaResourcesManager) runtimeContext.getReplicaResourcesManager();
+        Map<String, ClusterPartition[]> nodePartitions = runtimeContext.getMetadataProperties().getNodePartitions();
 
         /**
          * for each lost partition, get the remaining files from replicas
@@ -281,8 +278,8 @@
     @Override
     public void doRemoteRecoveryPlan(Map<String, Set<Integer>> recoveryPlan) throws HyracksDataException {
         int maxRecoveryAttempts = replicationProperties.getMaxRemoteRecoveryAttempts();
-        PersistentLocalResourceRepository resourceRepository = (PersistentLocalResourceRepository) runtimeContext
-                .getLocalResourceRepository();
+        PersistentLocalResourceRepository resourceRepository =
+                (PersistentLocalResourceRepository) runtimeContext.getLocalResourceRepository();
         IDatasetLifecycleManager datasetLifeCycleManager = runtimeContext.getDatasetLifecycleManager();
         ILogManager logManager = runtimeContext.getTransactionSubsystem().getLogManager();
         while (true) {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java
index b182add..2749b5a 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java
@@ -18,7 +18,7 @@
  */
 package org.apache.asterix.runtime.job.listener;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.transactions.DatasetId;
 import org.apache.asterix.common.transactions.ITransactionContext;
@@ -51,7 +51,7 @@
             @Override
             public void jobletFinish(JobStatus jobStatus) {
                 try {
-                    ITransactionManager txnManager = ((IAppRuntimeContext) jobletContext.getServiceContext()
+                    ITransactionManager txnManager = ((INcApplicationContext) jobletContext.getServiceContext()
                             .getApplicationContext()).getTransactionSubsystem().getTransactionManager();
                     ITransactionContext txnContext = txnManager.getTransactionContext(jobId, false);
                     txnContext.setWriteTxn(transactionalWrite);
@@ -65,7 +65,7 @@
             @Override
             public void jobletStart() {
                 try {
-                    ((IAppRuntimeContext) jobletContext.getServiceContext().getApplicationContext())
+                    ((INcApplicationContext) jobletContext.getServiceContext().getApplicationContext())
                             .getTransactionSubsystem().getTransactionManager().getTransactionContext(jobId, true);
                 } catch (ACIDException e) {
                     throw new Error(e);
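
Node-side code now casts the service context's application context to INcApplicationContext (formerly IAppRuntimeContext) to reach the transaction subsystem. The lookup in isolation, assuming a joblet context named jobletContext as in the listener above:

    // Sketch: resolving the NC-side transaction manager from a joblet context.
    ITransactionManager txnManager =
            ((INcApplicationContext) jobletContext.getServiceContext().getApplicationContext())
                    .getTransactionSubsystem().getTransactionManager();
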
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/MultiTransactionJobletEventListenerFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/MultiTransactionJobletEventListenerFactory.java
index c194d64..f41f326 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/MultiTransactionJobletEventListenerFactory.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/MultiTransactionJobletEventListenerFactory.java
@@ -20,7 +20,7 @@
 
 import java.util.List;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.transactions.DatasetId;
 import org.apache.asterix.common.transactions.ITransactionContext;
@@ -54,7 +54,7 @@
             public void jobletFinish(JobStatus jobStatus) {
                 try {
                     ITransactionManager txnManager =
-                            ((IAppRuntimeContext) jobletContext.getServiceContext().getApplicationContext())
+                            ((INcApplicationContext) jobletContext.getServiceContext().getApplicationContext())
                                     .getTransactionSubsystem().getTransactionManager();
                     for (JobId jobId : jobIds) {
                         ITransactionContext txnContext = txnManager.getTransactionContext(jobId, false);
@@ -71,7 +71,7 @@
             public void jobletStart() {
                 try {
                     for (JobId jobId : jobIds) {
-                        ((IAppRuntimeContext) jobletContext.getServiceContext().getApplicationContext())
+                        ((INcApplicationContext) jobletContext.getServiceContext().getApplicationContext())
                                 .getTransactionSubsystem().getTransactionManager().getTransactionContext(jobId, true);
                     }
                 } catch (ACIDException e) {
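
The two listener factories above show the access pattern that recurs throughout this change: runtime code no longer casts the application context to IAppRuntimeContext but to the NC-specific INcApplicationContext. A minimal sketch of the pattern, using only names that appear in the hunks above (a fragment, as it would sit inside a joblet callback):

    // Reaching the NC-side transaction manager from a joblet context after
    // the rename; all identifiers are taken from the diff above.
    INcApplicationContext appCtx =
            (INcApplicationContext) jobletContext.getServiceContext().getApplicationContext();
    ITransactionManager txnManager = appCtx.getTransactionSubsystem().getTransactionManager();
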
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReplicaEventMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReplicaEventMessage.java
index 8eb3663..fe230b4 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReplicaEventMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReplicaEventMessage.java
@@ -18,16 +18,15 @@
  */
 package org.apache.asterix.runtime.message;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.asterix.common.replication.Replica;
 import org.apache.asterix.common.replication.ReplicaEvent;
 import org.apache.asterix.event.schema.cluster.Node;
 import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 
-public class ReplicaEventMessage implements IApplicationMessage {
+public class ReplicaEventMessage implements INcAddressedMessage {
 
     private static final long serialVersionUID = 1L;
     private final String nodeId;
@@ -53,8 +52,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+    public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
         Node node = new Node();
         node.setId(nodeId);
         node.setClusterIp(nodeIPAddress);
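
ReplicaEventMessage also demonstrates the new message-addressing scheme: instead of one IApplicationMessage type whose handle(IControllerService) had to downcast the controller, a message now declares its destination by implementing INcAddressedMessage (or ICcAddressedMessage, below) and receives the matching typed context directly. A hedged sketch of the NC-addressed shape; the class name is hypothetical, and the interface may require further members not visible in this excerpt:

    import org.apache.asterix.common.api.INcApplicationContext;
    import org.apache.asterix.common.messaging.api.INcAddressedMessage;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    // Hypothetical example following the pattern above; treat as a sketch only.
    public class ExampleNcMessage implements INcAddressedMessage {
        private static final long serialVersionUID = 1L;

        @Override
        public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
            // The NC application context arrives pre-typed; no cast from
            // IControllerService is needed any more.
        }
    }
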
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdMessage.java
index fc2650e..277c0ba 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdMessage.java
@@ -21,17 +21,16 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
 import org.apache.asterix.common.messaging.api.INCMessageBroker;
 import org.apache.asterix.common.metadata.MetadataIndexImmutableProperties;
 import org.apache.asterix.common.transactions.IResourceIdManager;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 import org.apache.hyracks.control.nc.NodeControllerService;
 
-public class ReportMaxResourceIdMessage implements IApplicationMessage {
+public class ReportMaxResourceIdMessage implements ICcAddressedMessage {
     private static final long serialVersionUID = 1L;
     private static final Logger LOGGER = Logger.getLogger(ReportMaxResourceIdMessage.class.getName());
     private final long maxResourceId;
@@ -47,15 +46,14 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        IResourceIdManager resourceIdManager =
-                AppContextInfo.INSTANCE.getResourceIdManager();
+    public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        IResourceIdManager resourceIdManager = appCtx.getResourceIdManager();
         resourceIdManager.report(src, maxResourceId);
     }
 
     public static void send(NodeControllerService cs) throws HyracksDataException {
         NodeControllerService ncs = cs;
-        IAppRuntimeContext appContext = (IAppRuntimeContext) ncs.getApplicationContext();
+        INcApplicationContext appContext = (INcApplicationContext) ncs.getApplicationContext();
         long maxResourceId = Math.max(appContext.getLocalResourceRepository().maxId(),
                 MetadataIndexImmutableProperties.FIRST_AVAILABLE_USER_DATASET_ID);
         ReportMaxResourceIdMessage maxResourceIdMsg = new ReportMaxResourceIdMessage(ncs.getId(), maxResourceId);
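
The CC-addressed counterpart is symmetric: handle receives an ICcApplicationContext, so lookups through the removed AppContextInfo.INSTANCE singleton become calls on the injected context, exactly as in the handler above:

    // Dependencies come from the injected CC context rather than a singleton.
    @Override
    public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
        IResourceIdManager resourceIdManager = appCtx.getResourceIdManager();
        resourceIdManager.report(src, maxResourceId);
    }
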
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdRequestMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdRequestMessage.java
index a1290df..a43376d 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdRequestMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdRequestMessage.java
@@ -18,17 +18,17 @@
  */
 package org.apache.asterix.runtime.message;
 
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 import org.apache.hyracks.control.nc.NodeControllerService;
 
-public class ReportMaxResourceIdRequestMessage implements IApplicationMessage {
+public class ReportMaxResourceIdRequestMessage implements INcAddressedMessage {
     private static final long serialVersionUID = 1L;
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        ReportMaxResourceIdMessage.send((NodeControllerService) cs);
+    public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+        ReportMaxResourceIdMessage.send((NodeControllerService) appCtx.getServiceContext().getControllerService());
     }
 
     @Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestMessage.java
index c8aef37..194fd59 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestMessage.java
@@ -20,15 +20,14 @@
 
 import java.util.Set;
 
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.messaging.api.ICCMessageBroker;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
 import org.apache.asterix.common.transactions.IResourceIdManager;
-import org.apache.asterix.runtime.utils.AppContextInfo;
 import org.apache.asterix.runtime.utils.ClusterStateManager;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 
-public class ResourceIdRequestMessage implements IApplicationMessage {
+public class ResourceIdRequestMessage implements ICcAddressedMessage {
     private static final long serialVersionUID = 1L;
     private final String src;
 
@@ -37,17 +36,15 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
+    public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
         try {
-            ICCMessageBroker broker =
-                    (ICCMessageBroker) AppContextInfo.INSTANCE.getCCServiceContext().getMessageBroker();
+            ICCMessageBroker broker = (ICCMessageBroker) appCtx.getServiceContext().getMessageBroker();
             ResourceIdRequestResponseMessage response = new ResourceIdRequestResponseMessage();
             if (!ClusterStateManager.INSTANCE.isClusterActive()) {
                 response.setResourceId(-1);
                 response.setException(new Exception("Cannot generate global resource id when cluster is not active."));
             } else {
-                IResourceIdManager resourceIdManager =
-                        AppContextInfo.INSTANCE.getResourceIdManager();
+                IResourceIdManager resourceIdManager = appCtx.getResourceIdManager();
                 response.setResourceId(resourceIdManager.createResourceId());
                 if (response.getResourceId() < 0) {
                     response.setException(new Exception("One or more nodes has not reported max resource id."));
@@ -60,8 +57,7 @@
         }
     }
 
-    private void requestMaxResourceID(IResourceIdManager resourceIdManager, ICCMessageBroker broker)
-            throws Exception {
+    private void requestMaxResourceID(IResourceIdManager resourceIdManager, ICCMessageBroker broker) throws Exception {
         Set<String> participantNodes = ClusterStateManager.INSTANCE.getParticipantNodes();
         ReportMaxResourceIdRequestMessage msg = new ReportMaxResourceIdRequestMessage();
         for (String nodeId : participantNodes) {
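
Read together, the resource-id messages form a small CC/NC round trip: an NC requests a global id with the CC-addressed ResourceIdRequestMessage; the CC answers with the NC-addressed ResourceIdRequestResponseMessage (below), carrying either a fresh id or an exception; and when the id cannot be produced because some node has not yet reported, requestMaxResourceID asks every participant NC to re-report via ReportMaxResourceIdRequestMessage, whose handler in turn calls ReportMaxResourceIdMessage.send.
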
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestResponseMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestResponseMessage.java
index 1106da9..6a9ed35 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestResponseMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestResponseMessage.java
@@ -18,13 +18,12 @@
  */
 package org.apache.asterix.runtime.message;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
 import org.apache.asterix.runtime.transaction.GlobalResourceIdFactory;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
 
-public class ResourceIdRequestResponseMessage implements IApplicationMessage {
+public class ResourceIdRequestResponseMessage implements INcAddressedMessage {
     private static final long serialVersionUID = 1L;
 
     private long resourceId;
@@ -47,8 +46,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
-        IAppRuntimeContext appCtx = (IAppRuntimeContext) cs.getApplicationContext();
+    public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
         ((GlobalResourceIdFactory) appCtx.getResourceIdFactory()).addNewIds(this);
     }
 
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/LSMPrimaryUpsertOperatorNodePushable.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/LSMPrimaryUpsertOperatorNodePushable.java
index 1f18c97..ed1a247 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/LSMPrimaryUpsertOperatorNodePushable.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/LSMPrimaryUpsertOperatorNodePushable.java
@@ -22,7 +22,7 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.dataflow.LSMIndexUtil;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
@@ -52,7 +52,6 @@
 import org.apache.hyracks.storage.am.btree.util.BTreeUtils;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -158,8 +157,8 @@
 
             cursor = indexAccessor.createSearchCursor(false);
             frameTuple = new FrameTupleReference();
-            IAppRuntimeContext appCtx =
-                    (IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+            INcApplicationContext appCtx =
+                    (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
             LSMIndexUtil.checkAndSetFirstLSN((AbstractLSMIndex) index,
                     appCtx.getTransactionSubsystem().getLogManager());
             frameOpCallback =
@@ -275,8 +274,8 @@
             // callback here before calling nextFrame on the next operator
             frameOpCallback.frameCompleted(!firstModification);
             appender.write(writer, true);
-        } catch (IndexException | IOException | AsterixException e) {
-            throw new HyracksDataException(e);
+        } catch (Exception e) {
+            throw HyracksDataException.create(e);
         }
     }
 
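
Note the switch from throw new HyracksDataException(e) to throw HyracksDataException.create(e) together with the broadened catch clause. A hedged reading: create(...) is a factory, presumably so an existing HyracksDataException is propagated as-is rather than wrapped a second time (assumed behavior; not shown in this excerpt). The idiom:

    try {
        appender.write(writer, true);
    } catch (Exception e) {
        // Factory wrapping instead of the constructor; assumed to pass an
        // existing HyracksDataException through unchanged.
        throw HyracksDataException.create(e);
    }
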
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/std/FlushDatasetOperatorDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/std/FlushDatasetOperatorDescriptor.java
index aca6455..85e92c3 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/std/FlushDatasetOperatorDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/std/FlushDatasetOperatorDescriptor.java
@@ -20,7 +20,7 @@
 
 import java.nio.ByteBuffer;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.transactions.DatasetId;
@@ -72,7 +72,7 @@
             @Override
             public void close() throws HyracksDataException {
                 try {
-                    IAppRuntimeContext appCtx = (IAppRuntimeContext) ctx.getJobletContext()
+                    INcApplicationContext appCtx = (INcApplicationContext) ctx.getJobletContext()
                             .getServiceContext().getApplicationContext();
                     IDatasetLifecycleManager datasetLifeCycleManager = appCtx.getDatasetLifecycleManager();
                     ILockManager lockManager = appCtx.getTransactionSubsystem().getLockManager();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/AppContextInfo.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/CcApplicationContext.java
similarity index 74%
rename from asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/AppContextInfo.java
rename to asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/CcApplicationContext.java
index ad510e4..8608d68 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/AppContextInfo.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/CcApplicationContext.java
@@ -20,7 +20,6 @@
 
 import java.io.IOException;
 import java.util.function.Supplier;
-import java.util.logging.Logger;
 
 import org.apache.asterix.common.cluster.IGlobalRecoveryManager;
 import org.apache.asterix.common.config.ActiveProperties;
@@ -28,7 +27,6 @@
 import org.apache.asterix.common.config.CompilerProperties;
 import org.apache.asterix.common.config.ExtensionProperties;
 import org.apache.asterix.common.config.ExternalProperties;
-import org.apache.asterix.common.config.IPropertiesProvider;
 import org.apache.asterix.common.config.MessagingProperties;
 import org.apache.asterix.common.config.MetadataProperties;
 import org.apache.asterix.common.config.NodeProperties;
@@ -36,7 +34,7 @@
 import org.apache.asterix.common.config.ReplicationProperties;
 import org.apache.asterix.common.config.StorageProperties;
 import org.apache.asterix.common.config.TransactionProperties;
-import org.apache.asterix.common.dataflow.IApplicationContextInfo;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.library.ILibraryManager;
 import org.apache.asterix.common.metadata.IMetadataBootstrap;
@@ -44,6 +42,7 @@
 import org.apache.asterix.common.transactions.IResourceIdManager;
 import org.apache.hyracks.api.application.ICCServiceContext;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.job.IJobLifecycleListener;
 import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import org.apache.hyracks.storage.common.IStorageManager;
 
@@ -52,9 +51,8 @@
  * instances that are accessed from the NCs. In addition an instance of ICCApplicationContext
  * is stored for access by the CC.
  */
-public class AppContextInfo implements IApplicationContextInfo, IPropertiesProvider {
+public class CcApplicationContext implements ICcApplicationContext {
 
-    public static final AppContextInfo INSTANCE = new AppContextInfo();
     private ICCServiceContext ccServiceCtx;
     private IGlobalRecoveryManager globalRecoveryManager;
     private ILibraryManager libraryManager;
@@ -73,51 +71,40 @@
     private Supplier<IMetadataBootstrap> metadataBootstrapSupplier;
     private IHyracksClientConnection hcc;
     private Object extensionManager;
-    private volatile boolean initialized = false;
     private IFaultToleranceStrategy ftStrategy;
+    private IJobLifecycleListener activeLifecycleListener;
 
-    private AppContextInfo() {
-    }
-
-    public static synchronized void initialize(ICCServiceContext ccServiceCtx, IHyracksClientConnection hcc,
+    public CcApplicationContext(ICCServiceContext ccServiceCtx, IHyracksClientConnection hcc,
             ILibraryManager libraryManager, IResourceIdManager resourceIdManager,
             Supplier<IMetadataBootstrap> metadataBootstrapSupplier, IGlobalRecoveryManager globalRecoveryManager,
-            IFaultToleranceStrategy ftStrategy)
+            IFaultToleranceStrategy ftStrategy, IJobLifecycleListener activeLifecycleListener)
             throws AsterixException, IOException {
-        if (INSTANCE.initialized) {
-            throw new AsterixException(AppContextInfo.class.getSimpleName() + " has been initialized already");
-        }
-        INSTANCE.initialized = true;
-        INSTANCE.ccServiceCtx = ccServiceCtx;
-        INSTANCE.hcc = hcc;
-        INSTANCE.libraryManager = libraryManager;
-        INSTANCE.resourceIdManager = resourceIdManager;
+        this.ccServiceCtx = ccServiceCtx;
+        this.hcc = hcc;
+        this.libraryManager = libraryManager;
+        this.resourceIdManager = resourceIdManager;
+        this.activeLifecycleListener = activeLifecycleListener;
         // Determine whether to use old-style asterix-configuration.xml or new-style configuration.
         // QQQ strip this out eventually
         PropertiesAccessor propertiesAccessor = PropertiesAccessor.getInstance(ccServiceCtx.getAppConfig());
-        INSTANCE.compilerProperties = new CompilerProperties(propertiesAccessor);
-        INSTANCE.externalProperties = new ExternalProperties(propertiesAccessor);
-        INSTANCE.metadataProperties = new MetadataProperties(propertiesAccessor);
-        INSTANCE.storageProperties = new StorageProperties(propertiesAccessor);
-        INSTANCE.txnProperties = new TransactionProperties(propertiesAccessor);
-        INSTANCE.activeProperties = new ActiveProperties(propertiesAccessor);
-        INSTANCE.extensionProperties = new ExtensionProperties(propertiesAccessor);
-        INSTANCE.replicationProperties = new ReplicationProperties(propertiesAccessor);
-        INSTANCE.ftStrategy = ftStrategy;
-        INSTANCE.hcc = hcc;
-        INSTANCE.buildProperties = new BuildProperties(propertiesAccessor);
-        INSTANCE.messagingProperties = new MessagingProperties(propertiesAccessor);
-        INSTANCE.nodeProperties = new NodeProperties(propertiesAccessor);
-        INSTANCE.metadataBootstrapSupplier = metadataBootstrapSupplier;
-        INSTANCE.globalRecoveryManager = globalRecoveryManager;
-    }
-
-    public boolean initialized() {
-        return initialized;
+        compilerProperties = new CompilerProperties(propertiesAccessor);
+        externalProperties = new ExternalProperties(propertiesAccessor);
+        metadataProperties = new MetadataProperties(propertiesAccessor);
+        storageProperties = new StorageProperties(propertiesAccessor);
+        txnProperties = new TransactionProperties(propertiesAccessor);
+        activeProperties = new ActiveProperties(propertiesAccessor);
+        extensionProperties = new ExtensionProperties(propertiesAccessor);
+        replicationProperties = new ReplicationProperties(propertiesAccessor);
+        this.ftStrategy = ftStrategy;
+        this.buildProperties = new BuildProperties(propertiesAccessor);
+        this.messagingProperties = new MessagingProperties(propertiesAccessor);
+        this.nodeProperties = new NodeProperties(propertiesAccessor);
+        this.metadataBootstrapSupplier = metadataBootstrapSupplier;
+        this.globalRecoveryManager = globalRecoveryManager;
     }
 
     @Override
-    public ICCServiceContext getCCServiceContext() {
+    public ICCServiceContext getServiceContext() {
         return ccServiceCtx;
     }
 
@@ -156,6 +144,7 @@
         return buildProperties;
     }
 
+    @Override
     public IHyracksClientConnection getHcc() {
         return hcc;
     }
@@ -207,6 +196,7 @@
         return nodeProperties;
     }
 
+    @Override
     public IResourceIdManager getResourceIdManager() {
         return resourceIdManager;
     }
@@ -218,4 +208,9 @@
     public IFaultToleranceStrategy getFaultToleranceStrategy() {
         return ftStrategy;
     }
+
+    @Override
+    public IJobLifecycleListener getActiveLifecycleListener() {
+        return activeLifecycleListener;
+    }
 }
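
The rename of AppContextInfo to CcApplicationContext is the heart of this change: the eagerly constructed INSTANCE singleton and its initialize(...)/initialized() machinery give way to a plain constructor, so the CC builds one context and hands it to its collaborators. A hedged wiring sketch, assuming it runs during CC startup with the listed dependencies in scope (the actual call site is outside this excerpt):

    // Construct the context once, with the constructor arguments declared above...
    CcApplicationContext appCtx = new CcApplicationContext(ccServiceCtx, hcc, libraryManager,
            resourceIdManager, metadataBootstrapSupplier, globalRecoveryManager,
            ftStrategy, activeLifecycleListener);
    // ...then hand it to components that previously reached for the singleton
    // (setCcAppCtx is introduced in ClusterStateManager below).
    ClusterStateManager.INSTANCE.setCcAppCtx(appCtx);
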
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
index 6bfbf77..2d8a04d 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
@@ -76,17 +76,19 @@
     private boolean metadataNodeActive = false;
     private Set<String> failedNodes = new HashSet<>();
     private IFaultToleranceStrategy ftStrategy;
+    private CcApplicationContext appCtx;
 
     private ClusterStateManager() {
         cluster = ClusterProperties.INSTANCE.getCluster();
-        // if this is the CC process
-        if (AppContextInfo.INSTANCE.initialized() && AppContextInfo.INSTANCE.getCCServiceContext() != null) {
-            node2PartitionsMap = AppContextInfo.INSTANCE.getMetadataProperties().getNodePartitions();
-            clusterPartitions = AppContextInfo.INSTANCE.getMetadataProperties().getClusterPartitions();
-            currentMetadataNode = AppContextInfo.INSTANCE.getMetadataProperties().getMetadataNodeName();
-            ftStrategy = AppContextInfo.INSTANCE.getFaultToleranceStrategy();
-            ftStrategy.bindTo(this);
-        }
+    }
+
+    public void setCcAppCtx(CcApplicationContext appCtx) {
+        this.appCtx = appCtx;
+        node2PartitionsMap = appCtx.getMetadataProperties().getNodePartitions();
+        clusterPartitions = appCtx.getMetadataProperties().getClusterPartitions();
+        currentMetadataNode = appCtx.getMetadataProperties().getMetadataNodeName();
+        ftStrategy = appCtx.getFaultToleranceStrategy();
+        ftStrategy.bindTo(this);
     }
 
     public synchronized void removeNCConfiguration(String nodeId) throws HyracksException {
@@ -162,12 +164,12 @@
 
         // if all storage partitions are active as well as the metadata node, then the cluster is active
         if (metadataNodeActive) {
-            AppContextInfo.INSTANCE.getMetadataBootstrap().init();
+            appCtx.getMetadataBootstrap().init();
             setState(ClusterState.ACTIVE);
             LOGGER.info("Cluster is now " + state);
             notifyAll();
             // start global recovery
-            AppContextInfo.INSTANCE.getGlobalRecoveryManager().startGlobalRecovery();
+            appCtx.getGlobalRecoveryManager().startGlobalRecovery(appCtx);
         }
     }
 
@@ -210,7 +212,7 @@
             }
             return new String[0];
         }
-        return (String [])ncConfig.get(NCConfig.Option.IODEVICES);
+        return (String[]) ncConfig.get(NCConfig.Option.IODEVICES);
     }
 
     @Override
@@ -245,8 +247,8 @@
                 clusterActiveLocations.add(p.getActiveNodeId());
             }
         }
-        clusterPartitionConstraint = new AlgebricksAbsolutePartitionConstraint(
-                clusterActiveLocations.toArray(new String[] {}));
+        clusterPartitionConstraint =
+                new AlgebricksAbsolutePartitionConstraint(clusterActiveLocations.toArray(new String[] {}));
     }
 
     public boolean isGlobalRecoveryCompleted() {
@@ -265,8 +267,8 @@
         return state == ClusterState.ACTIVE;
     }
 
-    public static int getNumberOfNodes() {
-        return AppContextInfo.INSTANCE.getMetadataProperties().getNodeNames().size();
+    public int getNumberOfNodes() {
+        return appCtx.getMetadataProperties().getNodeNames().size();
     }
 
     @Override
@@ -294,13 +296,13 @@
         return metadataNodeActive;
     }
 
-    public synchronized ObjectNode getClusterStateDescription()  {
+    public synchronized ObjectNode getClusterStateDescription() {
         ObjectMapper om = new ObjectMapper();
         ObjectNode stateDescription = om.createObjectNode();
         stateDescription.put("state", state.name());
         stateDescription.put("metadata_node", currentMetadataNode);
         ArrayNode ncs = om.createArrayNode();
-        stateDescription.set("ncs",ncs);
+        stateDescription.set("ncs", ncs);
         for (Map.Entry<String, ClusterPartition[]> entry : node2PartitionsMap.entrySet()) {
             ObjectNode nodeJSON = om.createObjectNode();
             nodeJSON.put("node_id", entry.getKey());
@@ -318,9 +320,7 @@
                 }
             }
             nodeJSON.put("state", failedNodes.contains(entry.getKey()) ? "FAILED"
-                    : allActive ? "ACTIVE"
-                    : anyActive ? "PARTIALLY_ACTIVE"
-                    : "INACTIVE");
+                    : allActive ? "ACTIVE" : anyActive ? "PARTIALLY_ACTIVE" : "INACTIVE");
             nodeJSON.putPOJO("partitions", partitions);
             ncs.add(nodeJSON);
         }
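
One consequence of the injection above: ClusterStateManager members such as getNumberOfNodes() are now instance methods that dereference appCtx, so on the CC they are presumably safe only after setCcAppCtx(...) has run, and on NCs, where no CC context is ever set, they should not be called at all:

    // Hedged ordering sketch for CC-side callers.
    ClusterStateManager.INSTANCE.setCcAppCtx(appCtx); // must happen first
    int nodes = ClusterStateManager.INSTANCE.getNumberOfNodes(); // safe afterwards
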
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeComponentsProvider.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeComponentsProvider.java
index 387e949..353a45c 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeComponentsProvider.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeComponentsProvider.java
@@ -18,7 +18,7 @@
  */
 package org.apache.asterix.runtime.utils;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
@@ -42,37 +42,37 @@
 
     @Override
     public ILSMIOOperationScheduler getIOScheduler(IHyracksTaskContext ctx) {
-        return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+        return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
                 .getLSMIOScheduler();
     }
 
     @Override
     public IBufferCache getBufferCache(IHyracksTaskContext ctx) {
-        return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+        return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
                 .getBufferCache();
     }
 
     @Override
     public IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx) {
-        return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+        return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
                 .getFileMapManager();
     }
 
     @Override
     public ILocalResourceRepository getLocalResourceRepository(IHyracksTaskContext ctx) {
-        return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+        return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
                 .getLocalResourceRepository();
     }
 
     @Override
     public IDatasetLifecycleManager getLifecycleManager(IHyracksTaskContext ctx) {
-        return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+        return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
                 .getDatasetLifecycleManager();
     }
 
     @Override
     public IResourceIdFactory getResourceIdFactory(IHyracksTaskContext ctx) {
-        return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+        return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
                 .getResourceIdFactory();
     }
 
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeUtils.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeUtils.java
index a2a191d..85e93b8 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeUtils.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeUtils.java
@@ -27,6 +27,7 @@
 import java.util.Set;
 
 import org.apache.asterix.common.config.CompilerProperties;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.control.cc.ClusterControllerService;
@@ -37,13 +38,14 @@
     private RuntimeUtils() {
     }
 
-    public static Set<String> getNodeControllersOnIP(InetAddress ipAddress) throws HyracksDataException {
-        Map<InetAddress, Set<String>> nodeControllerInfo = getNodeControllerMap();
+    public static Set<String> getNodeControllersOnIP(ICcApplicationContext appCtx, InetAddress ipAddress)
+            throws HyracksDataException {
+        Map<InetAddress, Set<String>> nodeControllerInfo = getNodeControllerMap(appCtx);
         return nodeControllerInfo.get(ipAddress);
     }
 
-    public static List<String> getAllNodeControllers() throws HyracksDataException {
-        Collection<Set<String>> nodeControllersCollection = getNodeControllerMap().values();
+    public static List<String> getAllNodeControllers(ICcApplicationContext appCtx) throws HyracksDataException {
+        Collection<Set<String>> nodeControllersCollection = getNodeControllerMap(appCtx).values();
         List<String> nodeControllers = new ArrayList<>();
         for (Set<String> ncCollection : nodeControllersCollection) {
             nodeControllers.addAll(ncCollection);
@@ -51,21 +53,21 @@
         return nodeControllers;
     }
 
-    public static Map<InetAddress, Set<String>> getNodeControllerMap() throws HyracksDataException {
+    public static Map<InetAddress, Set<String>> getNodeControllerMap(ICcApplicationContext appCtx)
+            throws HyracksDataException {
         Map<InetAddress, Set<String>> map = new HashMap<>();
-        AppContextInfo.INSTANCE.getCCServiceContext().getCCContext().getIPAddressNodeMap(map);
+        appCtx.getServiceContext().getCCContext().getIPAddressNodeMap(map);
         return map;
     }
 
-    public static void getNodeControllerMap(Map<InetAddress, Set<String>> map) {
-        ClusterControllerService ccs =
-                (ClusterControllerService) AppContextInfo.INSTANCE.getCCServiceContext().getControllerService();
+    public static void getNodeControllerMap(ICcApplicationContext appCtx, Map<InetAddress, Set<String>> map) {
+        ClusterControllerService ccs = (ClusterControllerService) appCtx.getServiceContext().getControllerService();
         INodeManager nodeManager = ccs.getNodeManager();
         map.putAll(nodeManager.getIpAddressNodeNameMap());
     }
 
-    public static JobSpecification createJobSpecification() {
-        CompilerProperties compilerProperties = AppContextInfo.INSTANCE.getCompilerProperties();
+    public static JobSpecification createJobSpecification(ICcApplicationContext appCtx) {
+        CompilerProperties compilerProperties = appCtx.getCompilerProperties();
         int frameSize = compilerProperties.getFrameSize();
         return new JobSpecification(frameSize);
     }
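
RuntimeUtils completes the same migration for static helpers: each method now takes the ICcApplicationContext it used to fetch from the singleton. A usage sketch with the new signatures (a fragment; assumes an appCtx and an ipAddress in scope and a caller that may throw HyracksDataException):

    JobSpecification spec = RuntimeUtils.createJobSpecification(appCtx);
    List<String> allNcs = RuntimeUtils.getAllNodeControllers(appCtx);
    Set<String> ncsOnHost = RuntimeUtils.getNodeControllersOnIP(appCtx, ipAddress);
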
diff --git a/asterixdb/asterix-server/src/main/opt/ansible/bin/udf.sh b/asterixdb/asterix-server/src/main/opt/ansible/bin/udf.sh
new file mode 100755
index 0000000..2f8b6fe
--- /dev/null
+++ b/asterixdb/asterix-server/src/main/opt/ansible/bin/udf.sh
@@ -0,0 +1,72 @@
+#!/bin/bash
+# ------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ------------------------------------------------------------
+
+# Get options
+usage() { echo "./udf.sh -m [i|u] -d DATAVERSE_NAME -l LIBRARY_NAME [-p UDF_PACKAGE_PATH]" 1>&2; exit 1; }
+
+while getopts ":d:l:p:m:" o; do
+  case "${o}" in
+    m)
+        mode=${OPTARG}
+        ;;
+    d)
+        dname=${OPTARG}
+        ;;
+    l)
+        libname=${OPTARG}
+        ;;
+    p)
+        tarpath=${OPTARG}
+        ;;
+    *)
+        echo "Invalid option: -${OPTARG}" 1>&2
+        usage
+        ;;
+  esac
+done
+shift $((OPTIND-1))
+
+if [[ -z $dname ]] || [[ -z $libname ]]; then
+    echo "Dataverse name or Library name is missing"
+fi
+
+# Gets the absolute path so that the script can work no matter where it is invoked.
+pushd `dirname $0` > /dev/null
+SCRIPT_PATH=`pwd -P`
+popd > /dev/null
+ANSB_PATH=`dirname "${SCRIPT_PATH}"`
+
+INVENTORY=$ANSB_PATH/conf/inventory
+
+# Deploy/Destroy the UDF package on all nodes.
+export ANSIBLE_HOST_KEY_CHECKING=false
+if [[ $mode = "i" ]]; then
+    if [[ -z $tarpath ]]; then
+        echo "UDF package path is undefined"
+    else
+        echo "Install library to $dname.$libname"
+        ansible-playbook -i $INVENTORY $ANSB_PATH/yaml/deploy_udf.yml --extra-vars "dataverse=$dname libname=$libname package_path=$tarpath"
+    fi
+elif [[ $mode == "u" ]]; then
+    echo "Uninstall library in $dname.$libname"
+    ansible-playbook -i $INVENTORY $ANSB_PATH/yaml/destroy_udf.yml --extra-vars "dataverse=$dname libname=$libname"
+else
+    echo "Wrong mode"
+fi
\ No newline at end of file
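
For reference, invocations of the new helper script might look like the following; the dataverse, library, and package names are placeholders:

    ./udf.sh -m i -d MyDataverse -l mylib -p /tmp/mylib-pkg.tar.gz
    ./udf.sh -m u -d MyDataverse -l mylib
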
diff --git a/asterixdb/asterix-server/src/main/opt/ansible/yaml/deploy_udf.yml b/asterixdb/asterix-server/src/main/opt/ansible/yaml/deploy_udf.yml
new file mode 100644
index 0000000..e9300ea
--- /dev/null
+++ b/asterixdb/asterix-server/src/main/opt/ansible/yaml/deploy_udf.yml
@@ -0,0 +1,30 @@
+# ------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ------------------------------------------------------------
+
+- name: Deploy the UDF library {{ dataverse }}.{{ libname }} to the cluster
+  hosts: all
+  tasks:
+    - include_vars: ../conf/instance_settings.yml
+    - file:
+        path: "{{ binarydir }}/lib/udfs/{{ dataverse }}/{{ libname }}"
+        state: directory
+        mode: 0755
+    - unarchive:
+        src: "{{ package_path }}"
+        dest: "{{ binarydir }}/lib/udfs/{{ dataverse }}/{{ libname }}"
diff --git a/asterixdb/asterix-server/src/main/opt/ansible/yaml/destroy_udf.yml b/asterixdb/asterix-server/src/main/opt/ansible/yaml/destroy_udf.yml
new file mode 100644
index 0000000..cc4add3
--- /dev/null
+++ b/asterixdb/asterix-server/src/main/opt/ansible/yaml/destroy_udf.yml
@@ -0,0 +1,33 @@
+# ------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ------------------------------------------------------------
+
+- name: Destroy the library {{ libname }} in dataverse {{ dataverse }}
+  hosts: all
+  tasks:
+    - include_vars: ../conf/instance_settings.yml
+    - file:
+        path: "{{ binarydir }}/lib/udfs/uninstall/"
+        state: directory
+        mode: 0755
+    - file:
+        state: touch
+        path: "{{ binarydir }}/lib/udfs/uninstall/{{ dataverse }}.{{ libname }}"
+    - file:
+        path: "{{ binarydir }}/lib/udfs/{{ dataverse }}/{{ libname }}"
+        state: absent
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexOperationTrackerFactory.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexOperationTrackerFactory.java
index c553bc0..5e7cf97 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexOperationTrackerFactory.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexOperationTrackerFactory.java
@@ -19,7 +19,7 @@
 
 package org.apache.asterix.transaction.management.opcallbacks;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.hyracks.api.application.INCServiceContext;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
@@ -38,7 +38,7 @@
     @Override
     public ILSMOperationTracker getOperationTracker(INCServiceContext ctx) {
         IDatasetLifecycleManager dslcManager =
-                ((IAppRuntimeContext) ctx.getApplicationContext()).getDatasetLifecycleManager();
+                ((INcApplicationContext) ctx.getApplicationContext()).getDatasetLifecycleManager();
         return dslcManager.getOperationTracker(datasetID);
     }
 
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexOperationTrackerFactory.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexOperationTrackerFactory.java
index 4832acd..febcac2 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexOperationTrackerFactory.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexOperationTrackerFactory.java
@@ -18,7 +18,7 @@
  */
 package org.apache.asterix.transaction.management.opcallbacks;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.asterix.common.context.BaseOperationTracker;
 import org.apache.hyracks.api.application.INCServiceContext;
@@ -38,7 +38,7 @@
     @Override
     public ILSMOperationTracker getOperationTracker(INCServiceContext ctx) {
         IDatasetLifecycleManager dslcManager =
-                ((IAppRuntimeContext) ctx.getApplicationContext()).getDatasetLifecycleManager();
+                ((INcApplicationContext) ctx.getApplicationContext()).getDatasetLifecycleManager();
         return new BaseOperationTracker(datasetID, dslcManager.getDatasetInfo(datasetID));
     }
 
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeLocalResourceMetadata.java
index d10a9a9..0fca60d 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeLocalResourceMetadata.java
@@ -20,7 +20,7 @@
 
 import java.util.Map;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.hyracks.api.application.INCServiceContext;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
@@ -52,7 +52,7 @@
     @Override
     public ILSMIndex createIndexInstance(INCServiceContext serviceCtx, LocalResource resource)
             throws HyracksDataException {
-        IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
+        INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
         IIOManager ioManager = appCtx.getIOManager();
         FileReference file = ioManager.resolve(resource.getPath());
         return LSMBTreeUtil.createExternalBTree(ioManager, file, appCtx.getBufferCache(),
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeWithBuddyLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeWithBuddyLocalResourceMetadata.java
index fd7ff0f..aa82113 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeWithBuddyLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeWithBuddyLocalResourceMetadata.java
@@ -20,7 +20,7 @@
 
 import java.util.Map;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.transactions.Resource;
 import org.apache.hyracks.api.application.INCServiceContext;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -67,7 +67,7 @@
     @Override
     public ILSMIndex createIndexInstance(INCServiceContext serviceCtx, LocalResource resource)
             throws HyracksDataException {
-        IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
+        INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
         IIOManager ioManager = serviceCtx.getIoManager();
         FileReference file = ioManager.resolve(resource.getPath());
         return LSMBTreeUtil.createExternalBTreeWithBuddy(ioManager, file, appCtx.getBufferCache(),
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java
index 54cd97d..75516ff 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java
@@ -20,7 +20,7 @@
 
 import java.util.Map;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.hyracks.api.application.INCServiceContext;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
@@ -30,7 +30,6 @@
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
@@ -61,20 +60,15 @@
     @Override
     public ILSMIndex createIndexInstance(INCServiceContext serviceCtx, LocalResource resource)
             throws HyracksDataException {
-        IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
+        INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
         IIOManager ioManager = appCtx.getIOManager();
         FileReference file = ioManager.resolve(resource.getPath());
-        try {
-            return LSMRTreeUtils.createExternalRTree(ioManager, file, appCtx.getBufferCache(),
-                    appCtx.getFileMapManager(), typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                    valueProviderFactories, rtreePolicyType, appCtx.getBloomFilterFalsePositiveRate(),
-                    mergePolicyFactory.createMergePolicy(mergePolicyProperties,
-                            appCtx.getDatasetLifecycleManager()),
-                    opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
-                    ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, btreeFields, -1, true, isPointMBR,
-                    metadataPageManagerFactory);
-        } catch (TreeIndexException e) {
-            throw new HyracksDataException(e);
-        }
+        return LSMRTreeUtils.createExternalRTree(ioManager, file, appCtx.getBufferCache(), appCtx.getFileMapManager(),
+                typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
+                appCtx.getBloomFilterFalsePositiveRate(),
+                mergePolicyFactory.createMergePolicy(mergePolicyProperties, appCtx.getDatasetLifecycleManager()),
+                opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
+                ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, btreeFields, -1, true, isPointMBR,
+                metadataPageManagerFactory);
     }
 }
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java
index 0776567..4bb30a8 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java
@@ -20,7 +20,7 @@
 
 import java.util.Map;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.asterix.common.transactions.Resource;
 import org.apache.hyracks.api.application.INCServiceContext;
@@ -77,7 +77,7 @@
     @Override
     public ILSMIndex createIndexInstance(INCServiceContext serviceCtx, LocalResource resource)
             throws HyracksDataException {
-        IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
+        INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
         IIOManager ioManager = appCtx.getIOManager();
         FileReference file = ioManager.resolve(resource.getPath());
         int ioDeviceNum = Resource.getIoDeviceNum(ioManager, file.getDeviceHandle());
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
index a682f5a..d956647 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.transactions.Resource;
 import org.apache.hyracks.api.application.INCServiceContext;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -30,7 +30,6 @@
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
@@ -83,38 +82,31 @@
     @Override
     public ILSMIndex createIndexInstance(INCServiceContext serviceCtx, LocalResource resource)
             throws HyracksDataException {
-        IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
+        INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
         IIOManager ioManager = appCtx.getIOManager();
         FileReference file = ioManager.resolve(resource.getPath());
         int ioDeviceNum = Resource.getIoDeviceNum(ioManager, file.getDeviceHandle());
         List<IVirtualBufferCache> virtualBufferCaches =
                 appCtx.getDatasetLifecycleManager().getVirtualBufferCaches(datasetId(), ioDeviceNum);
-        try {
-            if (isPartitioned) {
-                return InvertedIndexUtils.createPartitionedLSMInvertedIndex(ioManager, virtualBufferCaches,
-                        appCtx.getFileMapManager(), invListTypeTraits, invListCmpFactories,
-                        tokenTypeTraits, tokenCmpFactories, tokenizerFactory, appCtx.getBufferCache(),
-                        file.getAbsolutePath(), appCtx.getBloomFilterFalsePositiveRate(),
-                        mergePolicyFactory.createMergePolicy(mergePolicyProperties,
-                                appCtx.getDatasetLifecycleManager()),
-                        opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
-                        ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits,
-                        filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
-                        invertedIndexFieldsForNonBulkLoadOps, true, metadataPageManagerFactory);
-            } else {
-                return InvertedIndexUtils.createLSMInvertedIndex(ioManager, virtualBufferCaches,
-                        appCtx.getFileMapManager(), invListTypeTraits, invListCmpFactories,
-                        tokenTypeTraits, tokenCmpFactories, tokenizerFactory, appCtx.getBufferCache(),
-                        file.getAbsolutePath(), appCtx.getBloomFilterFalsePositiveRate(),
-                        mergePolicyFactory.createMergePolicy(mergePolicyProperties,
-                                appCtx.getDatasetLifecycleManager()),
-                        opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
-                        ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits,
-                        filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
-                        invertedIndexFieldsForNonBulkLoadOps, true, metadataPageManagerFactory);
-            }
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
+        if (isPartitioned) {
+            return InvertedIndexUtils.createPartitionedLSMInvertedIndex(ioManager, virtualBufferCaches,
+                    appCtx.getFileMapManager(), invListTypeTraits, invListCmpFactories, tokenTypeTraits,
+                    tokenCmpFactories, tokenizerFactory, appCtx.getBufferCache(), file.getAbsolutePath(),
+                    appCtx.getBloomFilterFalsePositiveRate(),
+                    mergePolicyFactory.createMergePolicy(mergePolicyProperties, appCtx.getDatasetLifecycleManager()),
+                    opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
+                    ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits, filterCmpFactories,
+                    filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, true,
+                    metadataPageManagerFactory);
+        } else {
+            return InvertedIndexUtils.createLSMInvertedIndex(ioManager, virtualBufferCaches, appCtx.getFileMapManager(),
+                    invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
+                    appCtx.getBufferCache(), file.getAbsolutePath(), appCtx.getBloomFilterFalsePositiveRate(),
+                    mergePolicyFactory.createMergePolicy(mergePolicyProperties, appCtx.getDatasetLifecycleManager()),
+                    opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
+                    ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits, filterCmpFactories,
+                    filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, true,
+                    metadataPageManagerFactory);
         }
     }
 }
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java
index c3b7348..f3ac6c1 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.transactions.Resource;
 import org.apache.hyracks.api.application.INCServiceContext;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -32,7 +32,6 @@
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
@@ -84,23 +83,18 @@
     @Override
     public ILSMIndex createIndexInstance(INCServiceContext serviceCtx, LocalResource resource)
             throws HyracksDataException {
-        IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
+        INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
         IIOManager ioManager = appCtx.getIOManager();
         FileReference file = ioManager.resolve(resource.getPath());
         int ioDeviceNum = Resource.getIoDeviceNum(ioManager, file.getDeviceHandle());
         List<IVirtualBufferCache> virtualBufferCaches =
                 appCtx.getDatasetLifecycleManager().getVirtualBufferCaches(datasetId(), ioDeviceNum);
-        try {
-            return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(ioManager, virtualBufferCaches, file,
-                    appCtx.getBufferCache(), appCtx.getFileMapManager(), typeTraits,
-                    rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
-                    mergePolicyFactory.createMergePolicy(mergePolicyProperties,
-                            appCtx.getDatasetLifecycleManager()),
-                    opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
-                    ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, rtreeFields, filterTypeTraits,
-                    filterCmpFactories, filterFields, true, isPointMBR, metadataPageManagerFactory);
-        } catch (TreeIndexException e) {
-            throw new HyracksDataException(e);
-        }
+        return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(ioManager, virtualBufferCaches, file,
+                appCtx.getBufferCache(), appCtx.getFileMapManager(), typeTraits, rtreeCmpFactories, btreeCmpFactories,
+                valueProviderFactories, rtreePolicyType,
+                mergePolicyFactory.createMergePolicy(mergePolicyProperties, appCtx.getDatasetLifecycleManager()),
+                opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
+                ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, rtreeFields, filterTypeTraits,
+                filterCmpFactories, filterFields, true, isPointMBR, metadataPageManagerFactory);
     }
 }
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/runtime/CommitRuntime.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/runtime/CommitRuntime.java
index b114527..1e22458 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/runtime/CommitRuntime.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/runtime/CommitRuntime.java
@@ -21,7 +21,7 @@
 
 import java.nio.ByteBuffer;
 
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.transactions.ILogManager;
 import org.apache.asterix.common.transactions.ILogMarkerCallback;
@@ -65,8 +65,8 @@
     public CommitRuntime(IHyracksTaskContext ctx, JobId jobId, int datasetId, int[] primaryKeyFields,
             boolean isTemporaryDatasetWriteJob, boolean isWriteTransaction, int resourcePartition, boolean isSink) {
         this.ctx = ctx;
-        IAppRuntimeContext appCtx =
-                (IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+        INcApplicationContext appCtx =
+                (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
         this.transactionManager = appCtx.getTransactionSubsystem().getTransactionManager();
         this.logMgr = appCtx.getTransactionSubsystem().getLogManager();
         this.jobId = jobId;
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/ReplicationCheckpointManager.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/ReplicationCheckpointManager.java
index 6fdee33..6ce543b 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/ReplicationCheckpointManager.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/ReplicationCheckpointManager.java
@@ -23,9 +23,9 @@
 import java.util.Set;
 import java.util.logging.Logger;
 
+import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.asterix.common.cluster.ClusterPartition;
-import org.apache.asterix.common.config.IPropertiesProvider;
 import org.apache.asterix.common.config.MetadataProperties;
 import org.apache.asterix.common.replication.IReplicaResourcesManager;
 import org.apache.asterix.common.replication.IReplicationManager;
@@ -54,8 +54,8 @@
     @Override
     public synchronized void doSharpCheckpoint() throws HyracksDataException {
         LOGGER.info("Starting sharp checkpoint...");
-        final IDatasetLifecycleManager datasetLifecycleManager = txnSubsystem.getAsterixAppRuntimeContextProvider()
-                .getDatasetLifecycleManager();
+        final IDatasetLifecycleManager datasetLifecycleManager =
+                txnSubsystem.getAsterixAppRuntimeContextProvider().getDatasetLifecycleManager();
         datasetLifecycleManager.flushAllDatasets();
         long minFirstLSN;
         // If shutting down, need to check if we need to keep any remote logs for dead replicas
@@ -99,12 +99,12 @@
         boolean checkpointSucceeded = minFirstLSN >= checkpointTargetLSN;
         if (!checkpointSucceeded) {
             // Flush datasets with indexes behind target checkpoint LSN
-            final IDatasetLifecycleManager datasetLifecycleManager = txnSubsystem.getAsterixAppRuntimeContextProvider()
-                    .getDatasetLifecycleManager();
+            final IDatasetLifecycleManager datasetLifecycleManager =
+                    txnSubsystem.getAsterixAppRuntimeContextProvider().getDatasetLifecycleManager();
             datasetLifecycleManager.scheduleAsyncFlushForLaggingDatasets(checkpointTargetLSN);
             // Request remote replicas to flush lagging indexes
-            final IReplicationManager replicationManager = txnSubsystem.getAsterixAppRuntimeContextProvider()
-                    .getAppContext().getReplicationManager();
+            final IReplicationManager replicationManager =
+                    txnSubsystem.getAsterixAppRuntimeContextProvider().getAppContext().getReplicationManager();
             try {
                 replicationManager.requestFlushLaggingReplicaIndexes(checkpointTargetLSN);
             } catch (IOException e) {
@@ -120,14 +120,14 @@
     }
 
     private long getDeadReplicasMinFirstLSN(Set<String> deadReplicaIds) {
-        final IReplicaResourcesManager remoteResourcesManager = txnSubsystem.getAsterixAppRuntimeContextProvider()
-                .getAppContext().getReplicaResourcesManager();
-        final IPropertiesProvider propertiesProvider = (IPropertiesProvider) txnSubsystem
-                .getAsterixAppRuntimeContextProvider().getAppContext();
+        final IReplicaResourcesManager remoteResourcesManager =
+                txnSubsystem.getAsterixAppRuntimeContextProvider().getAppContext().getReplicaResourcesManager();
+        final IApplicationContext propertiesProvider =
+                txnSubsystem.getAsterixAppRuntimeContextProvider().getAppContext();
         final MetadataProperties metadataProperties = propertiesProvider.getMetadataProperties();
         final PersistentLocalResourceRepository localResourceRepository =
-                (PersistentLocalResourceRepository) txnSubsystem
-                .getAsterixAppRuntimeContextProvider().getLocalResourceRepository();
+                (PersistentLocalResourceRepository) txnSubsystem.getAsterixAppRuntimeContextProvider()
+                        .getLocalResourceRepository();
         // Get partitions of the dead replicas that are not active on this node
         final Set<Integer> deadReplicasPartitions = new HashSet<>();
         for (String deadReplicaId : deadReplicaIds) {
diff --git a/asterixdb/asterix-transactions/src/test/java/org/apache/asterix/transaction/management/service/locking/TestRuntimeContextProvider.java b/asterixdb/asterix-transactions/src/test/java/org/apache/asterix/transaction/management/service/locking/TestRuntimeContextProvider.java
index 5b8e5a4..d800cc7 100644
--- a/asterixdb/asterix-transactions/src/test/java/org/apache/asterix/transaction/management/service/locking/TestRuntimeContextProvider.java
+++ b/asterixdb/asterix-transactions/src/test/java/org/apache/asterix/transaction/management/service/locking/TestRuntimeContextProvider.java
@@ -23,7 +23,7 @@
 import java.util.concurrent.Executors;
 
 import org.apache.asterix.common.api.ThreadExecutor;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
 import org.apache.asterix.common.transactions.IAppRuntimeContextProvider;
 import org.apache.asterix.common.transactions.ITransactionSubsystem;
@@ -90,7 +90,7 @@
     }
 
     @Override
-    public IAppRuntimeContext getAppContext() {
+    public INcApplicationContext getAppContext() {
         throw new UnsupportedOperationException();
     }
 }
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java
index 3ac8c40..ae77d3a 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java
@@ -78,24 +78,22 @@
     public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
             final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
         if (inputArity == 0) {
-            return createSourceInputPushRuntime(ctx, recordDescProvider, partition, nPartitions);
+            return createSourceInputPushRuntime(ctx);
         } else {
-            return createOneInputOneOutputPushRuntime(ctx, recordDescProvider, partition, nPartitions);
+            return createOneInputOneOutputPushRuntime(ctx, recordDescProvider);
         }
     }
 
-    private IOperatorNodePushable createSourceInputPushRuntime(final IHyracksTaskContext ctx,
-            final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+    private IOperatorNodePushable createSourceInputPushRuntime(final IHyracksTaskContext ctx) {
         return new AbstractUnaryOutputSourceOperatorNodePushable() {
 
             @Override
             public void initialize() throws HyracksDataException {
                 IFrameWriter startOfPipeline;
-                RecordDescriptor pipelineOutputRecordDescriptor = outputArity > 0
-                        ? AlgebricksMetaOperatorDescriptor.this.recordDescriptors[0] : null;
-
-                PipelineAssembler pa = new PipelineAssembler(pipeline, inputArity, outputArity, null,
-                        pipelineOutputRecordDescriptor);
+                RecordDescriptor pipelineOutputRecordDescriptor =
+                        outputArity > 0 ? AlgebricksMetaOperatorDescriptor.this.recordDescriptors[0] : null;
+                PipelineAssembler pa =
+                        new PipelineAssembler(pipeline, inputArity, outputArity, null, pipelineOutputRecordDescriptor);
                 startOfPipeline = pa.assemblePipeline(writer, ctx);
                 try {
                     startOfPipeline.open();
@@ -110,7 +108,7 @@
     }
 
     private IOperatorNodePushable createOneInputOneOutputPushRuntime(final IHyracksTaskContext ctx,
-            final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+            final IRecordDescriptorProvider recordDescProvider) {
         return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
 
             private IFrameWriter startOfPipeline;
@@ -118,8 +116,8 @@
             @Override
             public void open() throws HyracksDataException {
                 if (startOfPipeline == null) {
-                    RecordDescriptor pipelineOutputRecordDescriptor = outputArity > 0
-                            ? AlgebricksMetaOperatorDescriptor.this.recordDescriptors[0] : null;
+                    RecordDescriptor pipelineOutputRecordDescriptor =
+                            outputArity > 0 ? AlgebricksMetaOperatorDescriptor.this.recordDescriptors[0] : null;
                     RecordDescriptor pipelineInputRecordDescriptor = recordDescProvider
                             .getInputRecordDescriptor(AlgebricksMetaOperatorDescriptor.this.getActivityId(), 0);
                     PipelineAssembler pa = new PipelineAssembler(pipeline, inputArity, outputArity,
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
index 11b8109..e321a1d 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
@@ -66,6 +66,22 @@
     public static final int FILE_DOES_NOT_EXISTS = 30;
     public static final int UNIDENTIFIED_IO_ERROR_DELETING_DIR = 31;
     public static final int RESULT_NO_RECORD = 32;
+    public static final int DUPLICATE_KEY = 33;
+    public static final int LOAD_NON_EMPTY_INDEX = 34;
+    public static final int MODIFY_NOT_SUPPORTED_IN_EXTERNAL_INDEX = 35;
+    public static final int FLUSH_NOT_SUPPORTED_IN_EXTERNAL_INDEX = 36;
+    public static final int UPDATE_OR_DELETE_NON_EXISTENT_KEY = 37;
+    public static final int INDEX_NOT_UPDATABLE = 38;
+    public static final int OCCURRENCE_THRESHOLD_PANIC_EXCEPTION = 39;
+    public static final int UNKNOWN_INVERTED_INDEX_TYPE = 40;
+    public static final int CANNOT_PROPOSE_LINEARIZER_DIFF_DIMENSIONS = 41;
+    public static final int CANNOT_PROPOSE_LINEARIZER_FOR_TYPE = 42;
+    public static final int RECORD_IS_TOO_LARGE = 43;
+    public static final int FAILED_TO_RE_FIND_PARENT = 44;
+    public static final int FAILED_TO_FIND_TUPLE = 45;
+    public static final int UNSORTED_LOAD_INPUT = 46;
+    public static final int OPERATION_EXCEEDED_MAX_RESTARTS = 47;
+    public static final int DUPLICATE_LOAD_INPUT = 48;
 
     // Compilation error codes.
     public static final int RULECOLLECTION_NOT_INSTANCE_OF_LIST = 10000;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksDataException.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksDataException.java
index 4c0eb1b..c9cdb2d 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksDataException.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksDataException.java
@@ -20,6 +20,8 @@
 package org.apache.hyracks.api.exceptions;
 
 import java.io.Serializable;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
 import org.apache.hyracks.api.util.ErrorMessageUtil;
 
@@ -29,11 +31,17 @@
 public class HyracksDataException extends HyracksException {
 
     private static final long serialVersionUID = 1L;
+    private static final Logger LOGGER = Logger.getLogger(HyracksDataException.class.getName());
 
     public static HyracksDataException create(Throwable cause) {
         if (cause instanceof HyracksDataException || cause == null) {
             return (HyracksDataException) cause;
         }
+        if (cause instanceof InterruptedException && !Thread.currentThread().isInterrupted()) {
+            LOGGER.log(Level.WARNING,
+                    "Wrapping an InterruptedException in HyracksDataException and current thread is not interrupted",
+                    cause);
+        }
         return new HyracksDataException(cause);
     }
 
@@ -46,8 +54,8 @@
     }
 
     public static HyracksDataException create(HyracksDataException e, String nodeId) {
-        return new HyracksDataException(e.getComponent(), e.getErrorCode(), e.getMessage(), e.getCause(), nodeId, e
-                .getParams());
+        return new HyracksDataException(e.getComponent(), e.getErrorCode(), e.getMessage(), e.getCause(), nodeId,
+                e.getParams());
     }
 
     public static HyracksDataException suppress(HyracksDataException root, Throwable th) {
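
Aside: the new guard in create(Throwable) logs a warning when an InterruptedException is wrapped while the current thread's interrupt status has already been cleared. A minimal caller-side sketch of the pattern this encourages (the helper class and method names are illustrative, not part of this change): restore the interrupt flag before wrapping.

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import org.apache.hyracks.api.exceptions.HyracksDataException;

// Hypothetical helper illustrating the pattern the new warning encourages:
// re-set the thread's interrupt status before wrapping an InterruptedException,
// so create(Throwable) sees an interrupted thread and upstream code can still
// observe the interrupt.
public class InterruptAwareAwait {
    public static void await(Future<Void> task) throws HyracksDataException {
        try {
            task.get();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the flag before wrapping
            throw HyracksDataException.create(e);
        } catch (ExecutionException e) {
            throw HyracksDataException.create(e);
        }
    }
}
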
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivityOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivityOperatorNodePushable.java
index 1c4f916..eeaee04 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivityOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivityOperatorNodePushable.java
@@ -20,14 +20,14 @@
 package org.apache.hyracks.api.rewriter.runtime;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Queue;
 import java.util.Map.Entry;
+import java.util.Queue;
 import java.util.concurrent.Future;
+import java.util.concurrent.Semaphore;
 
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hyracks.api.comm.IFrameWriter;
@@ -43,12 +43,10 @@
 /**
  * The runtime of a SuperActivity, which internally executes a DAG of one-to-one
  * connected activities in a single thread.
- *
- * @author yingyib
  */
 public class SuperActivityOperatorNodePushable implements IOperatorNodePushable {
-    private final Map<ActivityId, IOperatorNodePushable> operatorNodePushables = new HashMap<ActivityId, IOperatorNodePushable>();
-    private final List<IOperatorNodePushable> operatorNodePushablesBFSOrder = new ArrayList<IOperatorNodePushable>();
+    private final Map<ActivityId, IOperatorNodePushable> operatorNodePushables = new HashMap<>();
+    private final List<IOperatorNodePushable> operatorNodePushablesBFSOrder = new ArrayList<>();
     private final Map<ActivityId, IActivity> startActivities;
     private final SuperActivity parent;
     private final IHyracksTaskContext ctx;
@@ -56,7 +54,6 @@
     private final int partition;
     private final int nPartitions;
     private int inputArity = 0;
-    private boolean[] startedInitialization;
 
     public SuperActivityOperatorNodePushable(SuperActivity parent, Map<ActivityId, IActivity> startActivities,
             IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
@@ -80,24 +77,25 @@
 
     @Override
     public void initialize() throws HyracksDataException {
-        // Initializes all OperatorNodePushables in parallel and then finally deinitializes them.
-        runInParallel((op, index) -> {
-            startedInitialization[index] = true;
-            op.initialize();
-        });
+        runInParallel(op -> op.initialize());
+    }
+
+    @Override
+    public void deinitialize() throws HyracksDataException {
+        runInParallel(op -> op.deinitialize());
     }
 
     private void init() throws HyracksDataException {
-        Map<ActivityId, IOperatorNodePushable> startOperatorNodePushables = new HashMap<ActivityId, IOperatorNodePushable>();
-        Queue<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> childQueue = new LinkedList<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
-        List<IConnectorDescriptor> outputConnectors = null;
+        Map<ActivityId, IOperatorNodePushable> startOperatorNodePushables = new HashMap<>();
+        Queue<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> childQueue = new LinkedList<>();
+        List<IConnectorDescriptor> outputConnectors;
 
         /**
          * Set up the source operators
          */
         for (Entry<ActivityId, IActivity> entry : startActivities.entrySet()) {
-            IOperatorNodePushable opPushable = entry.getValue().createPushRuntime(ctx, recordDescProvider, partition,
-                    nPartitions);
+            IOperatorNodePushable opPushable =
+                    entry.getValue().createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
             startOperatorNodePushables.put(entry.getKey(), opPushable);
             operatorNodePushablesBFSOrder.add(opPushable);
             operatorNodePushables.put(entry.getKey(), opPushable);
@@ -154,19 +152,6 @@
                 }
             }
         }
-
-        // Sets the startedInitialization flags to be false.
-        startedInitialization = new boolean[operatorNodePushablesBFSOrder.size()];
-        Arrays.fill(startedInitialization, false);
-    }
-
-    @Override
-    public void deinitialize() throws HyracksDataException {
-        runInParallel((op, index) -> {
-            if (startedInitialization[index]) {
-                op.deinitialize();
-            }
-        });
     }
 
     @Override
@@ -192,8 +177,7 @@
          */
         Pair<ActivityId, Integer> activityIdInputIndex = parent.getActivityIdInputIndex(index);
         IOperatorNodePushable operatorNodePushable = operatorNodePushables.get(activityIdInputIndex.getLeft());
-        IFrameWriter writer = operatorNodePushable.getInputFrameWriter(activityIdInputIndex.getRight());
-        return writer;
+        return operatorNodePushable.getInputFrameWriter(activityIdInputIndex.getRight());
     }
 
     @Override
@@ -201,31 +185,40 @@
         return "Super Activity " + parent.getActivityMap().values().toString();
     }
 
+    @FunctionalInterface
     interface OperatorNodePushableAction {
-        void runAction(IOperatorNodePushable op, int opIndex) throws HyracksDataException;
+        void run(IOperatorNodePushable op) throws HyracksDataException;
     }
 
-    private void runInParallel(OperatorNodePushableAction opAction) throws HyracksDataException {
-        List<Future<Void>> initializationTasks = new ArrayList<>();
+    private void runInParallel(OperatorNodePushableAction action) throws HyracksDataException {
+        List<Future<Void>> tasks = new ArrayList<>();
+        final Semaphore startSemaphore = new Semaphore(1 - operatorNodePushablesBFSOrder.size());
+        final Semaphore completeSemaphore = new Semaphore(1 - operatorNodePushablesBFSOrder.size());
         try {
-            int index = 0;
-            // Run one action for all OperatorNodePushables in parallel through a thread pool.
             for (final IOperatorNodePushable op : operatorNodePushablesBFSOrder) {
-                final int opIndex = index++;
-                initializationTasks.add(ctx.getExecutorService().submit(() -> {
-                    opAction.runAction(op, opIndex);
+                tasks.add(ctx.getExecutorService().submit(() -> {
+                    startSemaphore.release();
+                    try {
+                        action.run(op);
+                    } finally {
+                        completeSemaphore.release();
+                    }
                     return null;
                 }));
             }
-            // Waits until all parallel actions to finish.
-            for (Future<Void> initializationTask : initializationTasks) {
-                initializationTask.get();
+            for (Future<Void> task : tasks) {
+                task.get();
             }
         } catch (Exception e) {
-            for (Future<Void> initializationTask : initializationTasks) {
-                initializationTask.cancel(true);
+            try {
+                startSemaphore.acquireUninterruptibly();
+                for (Future<Void> task : tasks) {
+                    task.cancel(true);
+                }
+            } finally {
+                completeSemaphore.acquireUninterruptibly();
             }
-            throw new HyracksDataException(e);
+            throw HyracksDataException.create(e);
         }
     }
 }
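
For reference: the two semaphores above are seeded with 1 - n permits, so acquireUninterruptibly() returns only after all n tasks have released once. Acquiring the start semaphore before cancelling guarantees no task is cancelled before it has entered its try block (and therefore before its finally-release of the completion semaphore is assured); acquiring the completion semaphore afterwards guarantees every task has finished before the failure propagates. A standalone sketch of the same idiom, assuming a caller-supplied executor and task list (all names here are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.Semaphore;

// Standalone sketch of the barrier idiom used by runInParallel: a Semaphore
// seeded with (1 - n) permits makes acquireUninterruptibly() block until all
// n workers have called release() once.
public class SemaphoreBarrierSketch {
    public static void runAll(ExecutorService executor, List<Runnable> actions) throws Exception {
        final int n = actions.size();
        final Semaphore started = new Semaphore(1 - n);
        final Semaphore completed = new Semaphore(1 - n);
        List<Future<Void>> tasks = new ArrayList<>();
        try {
            for (Runnable action : actions) {
                tasks.add(executor.submit(() -> {
                    started.release();
                    try {
                        action.run();
                    } finally {
                        completed.release();
                    }
                    return null;
                }));
            }
            for (Future<Void> task : tasks) {
                task.get();
            }
        } catch (Exception e) {
            // Once this returns, every task has entered its try block, so each
            // one's finally is guaranteed to release the completion semaphore.
            started.acquireUninterruptibly();
            try {
                for (Future<Void> task : tasks) {
                    task.cancel(true);
                }
            } finally {
                // Block until all tasks have finished, normally or via interrupt.
                completed.acquireUninterruptibly();
            }
            throw e;
        }
    }
}
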
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties b/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
index ed054cd..c3c2596 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
@@ -51,5 +51,21 @@
 30 = File %1$s doesn't exist
 31 = Unidentified IO error occurred while deleting the dir %1$s
 32 = No record for partition %1$s of result set %2$s
+33 = Inserting duplicate keys into the primary storage
+34 = Cannot load an index that is not empty
+35 = Modify not supported in External LSM Index
+36 = Flush not supported in External LSM Index
+37 = Index key not found
+38 = Index is not updatable
+39 = Merge Threshold is less than or equal to 0
+40 = Unknown inverted index type %1$s
+41 = Cannot propose linearizer if dimensions have different types
+42 = Cannot propose linearizer for type %1$s
+43 = Record size (%1$s) larger than maximum acceptable record size (%2$s)
+44 = Failed to re-find parent of a page in the tree
+45 = Failed to find a tuple in a page
+46 = Unsorted load input
+47 = Operation exceeded the maximum number of restarts %1$s
+48 = Loading duplicate keys into the primary storage
 
 10000 = The given rule collection %1$s is not an instance of the List class.
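
Each new constant in ErrorCode pairs with a positional %n$s template above, and call sites raise them through HyracksDataException.create, as the DUPLICATE_KEY sites elsewhere in this change do. A hedged sketch of a parameterized use follows; the varargs create(code, params...) overload is an assumption inferred from the no-argument create(ErrorCode.DUPLICATE_KEY) calls, and the class below is illustrative only.

import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;

// Hedged sketch: raising the new RECORD_IS_TOO_LARGE code so that message 43,
// "Record size (%1$s) larger than maximum acceptable record size (%2$s)",
// is formatted with the two sizes.
public class RecordSizeCheck {
    static void checkRecordSize(int recordSize, int maxRecordSize) throws HyracksDataException {
        if (recordSize > maxRecordSize) {
            throw HyracksDataException.create(ErrorCode.RECORD_IS_TOO_LARGE, recordSize, maxRecordSize);
        }
    }
}
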
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
index 6e4214f..ac69a5a 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
@@ -54,7 +54,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import org.apache.hyracks.storage.am.common.api.IPageManagerFactory;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
@@ -69,8 +68,8 @@
 import org.apache.hyracks.storage.common.IStorageManager;
 import org.apache.hyracks.storage.common.file.TransientLocalResourceFactoryProvider;
 import org.apache.hyracks.test.support.TestIndexLifecycleManagerProvider;
-import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
 import org.apache.hyracks.test.support.TestStorageManager;
+import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
 import org.apache.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
 import org.apache.hyracks.tests.integration.AbstractIntegrationTest;
 import org.junit.After;
@@ -202,7 +201,7 @@
     protected abstract IIndexDataflowHelperFactory createDataFlowHelperFactory(
             IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
             IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory,
-            int[] btreeFields) throws TreeIndexException;
+            int[] btreeFields) throws HyracksDataException;
 
     protected void createPrimaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
index 83318ce..0296a35 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
@@ -27,6 +27,7 @@
 import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -38,7 +39,6 @@
 import org.apache.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
 import org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.freepage.LinkedMetadataPageManagerFactory;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
@@ -116,7 +116,7 @@
     protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
             IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
             IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory,
-            int[] btreeFields) throws TreeIndexException {
+            int[] btreeFields) throws HyracksDataException {
         return ((RTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
                 rtreePolicyType, null, true);
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
index 7c2676d..092aa9c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
@@ -27,6 +27,7 @@
 import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -38,7 +39,6 @@
 import org.apache.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
 import org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.freepage.LinkedMetadataPageManagerFactory;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
@@ -114,7 +114,7 @@
     protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
             IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
             IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory,
-            int[] btreeFields) throws TreeIndexException {
+            int[] btreeFields) throws HyracksDataException {
         return ((RTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
                 rtreePolicyType, null, true);
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/SuperActivityRewritingTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/SuperActivityRewritingTest.java
index 683857f..8b0b8a0 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/SuperActivityRewritingTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/SuperActivityRewritingTest.java
@@ -21,6 +21,7 @@
 import java.nio.ByteBuffer;
 import java.util.HashSet;
 import java.util.Set;
+import java.util.concurrent.CountDownLatch;
 
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
@@ -124,7 +125,8 @@
                 IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
                         throws HyracksDataException {
             return new IOperatorNodePushable() {
-                private Set<Long> threads = new HashSet<Long>();
+                private CountDownLatch allOpenedSignal = new CountDownLatch(3);
+                private Set<Long> threads = new HashSet<>();
 
                 @Override
                 public void initialize() throws HyracksDataException {
@@ -154,6 +156,7 @@
                     return new IFrameWriter() {
                         @Override
                         public void open() throws HyracksDataException {
+                            allOpenedSignal.countDown();
                             synchronized (threads) {
                                 threads.add(Thread.currentThread().getId());
                             }
@@ -171,7 +174,11 @@
 
                         @Override
                         public void close() throws HyracksDataException {
-
+                            try {
+                                allOpenedSignal.await();
+                            } catch (InterruptedException e) {
+                                // This test is not expected to be interrupted; if it is, fall through and close
+                            }
                         }
 
                         @Override
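
The CountDownLatch(3) added above makes each writer's close() wait until all three writers have opened, so the three pipelines must be alive concurrently and the thread-id assertion cannot pass by accident on a single reused thread. The idiom in isolation (a minimal sketch, not part of the change):

import java.util.concurrent.CountDownLatch;

// Minimal sketch of the latch idiom: each of N writers counts down on open()
// and blocks in close() until all N have opened, forcing their lifetimes to
// overlap across threads.
public class OverlapLatch {
    private final CountDownLatch allOpened;

    public OverlapLatch(int writers) {
        this.allOpened = new CountDownLatch(writers);
    }

    public void open() {
        allOpened.countDown();
    }

    public void close() throws InterruptedException {
        allOpened.await(); // returns only after every writer has opened
    }
}
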
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomFilter.java b/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomFilter.java
index 80c3628..dee8271 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomFilter.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomFilter.java
@@ -25,7 +25,6 @@
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
 import org.apache.hyracks.storage.common.buffercache.ICachedPage;
 import org.apache.hyracks.storage.common.buffercache.IFIFOPageQueue;
@@ -34,11 +33,11 @@
 
 public class BloomFilter {
 
-    private final static int METADATA_PAGE_ID = 0;
-    private final static int NUM_PAGES_OFFSET = 0; // 0
-    private final static int NUM_HASHES_USED_OFFSET = NUM_PAGES_OFFSET + 4; // 4
-    private final static int NUM_ELEMENTS_OFFSET = NUM_HASHES_USED_OFFSET + 4; // 8
-    private final static int NUM_BITS_OFFSET = NUM_ELEMENTS_OFFSET + 8; // 12
+    private static final int METADATA_PAGE_ID = 0;
+    private static final int NUM_PAGES_OFFSET = 0; // 0
+    private static final int NUM_HASHES_USED_OFFSET = NUM_PAGES_OFFSET + 4; // 4
+    private static final int NUM_ELEMENTS_OFFSET = NUM_HASHES_USED_OFFSET + 4; // 8
+    private static final int NUM_BITS_OFFSET = NUM_ELEMENTS_OFFSET + 8; // 16
 
     private final IBufferCache bufferCache;
     private final IFileMapProvider fileMapProvider;
@@ -258,7 +257,7 @@
         }
 
         @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+        public void add(ITupleReference tuple) throws HyracksDataException {
             if (numPages == 0) {
                 throw new HyracksDataException(
                         "Cannot add elements to this filter since it is supposed to be empty (number of elements hint passed to the filter during construction was 0).");
@@ -278,7 +277,7 @@
         }
 
         @Override
-        public void end() throws HyracksDataException, IndexException {
+        public void end() throws HyracksDataException {
             allocateAndInitMetaDataPage();
             queue.put(metaDataPage);
             for (ICachedPage p : pages) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeFrame.java
index e0abb1c..2383192 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeFrame.java
@@ -23,12 +23,11 @@
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.btree.impls.BTreeOpContext.PageValidationInfo;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 
 public interface IBTreeFrame extends ITreeIndexFrame {
-    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException;
+    public int findInsertTupleIndex(ITupleReference tuple) throws HyracksDataException;
 
-    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException;
+    public int findDeleteTupleIndex(ITupleReference tuple) throws HyracksDataException;
 
     public void insertSorted(ITupleReference tuple) throws HyracksDataException;
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeLeafFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
index 2e072ce..37e0ab8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
@@ -22,7 +22,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.FindTupleMode;
 import org.apache.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -33,9 +32,9 @@
     public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference pageTuple, MultiComparator cmp,
                               FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) throws HyracksDataException;
 
-    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException;
+    public int findUpdateTupleIndex(ITupleReference tuple) throws HyracksDataException;
 
-    public int findUpsertTupleIndex(ITupleReference tuple) throws TreeIndexException;
+    public int findUpsertTupleIndex(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * @param searchTuple
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/exceptions/BTreeException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/exceptions/BTreeException.java
deleted file mode 100644
index bb50054..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/exceptions/BTreeException.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.btree.exceptions;
-
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-
-public class BTreeException extends TreeIndexException {
-
-    protected static final long serialVersionUID = 1L;
-
-    public BTreeException(Exception e) {
-        super(e);
-    }
-
-    public BTreeException(String message) {
-        super(message);
-    }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java
deleted file mode 100644
index 5846747..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.btree.exceptions;
-
-public class BTreeNotUpdateableException extends BTreeException {
-    private static final long serialVersionUID = 1L;
-
-    public BTreeNotUpdateableException(Exception e) {
-        super(e);
-    }
-
-    public BTreeNotUpdateableException(String message) {
-        super(message);
-    }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
index 157c663..1bc6db0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
@@ -24,6 +24,7 @@
 import java.util.Collections;
 
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.btree.api.IBTreeLeafFrame;
@@ -38,9 +39,6 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameCompressor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
 import org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
 import org.apache.hyracks.storage.am.common.ophelpers.FindTupleMode;
 import org.apache.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
@@ -116,7 +114,7 @@
         try {
             return compressor.compress(this, cmp);
         } catch (Exception e) {
-            throw new HyracksDataException(e);
+            throw HyracksDataException.create(e);
         }
     }
 
@@ -174,8 +172,7 @@
             int tupleLength = tupleEndOff - tupleOff;
             System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
 
-            slotManager.setSlot(sortedTupleOffs.get(i).slotOff,
-                    slotManager.encodeSlotFields(prefixSlotNum, freeSpace));
+            slotManager.setSlot(sortedTupleOffs.get(i).slotOff, slotManager.encodeSlotFields(prefixSlotNum, freeSpace));
             freeSpace += tupleLength;
         }
 
@@ -211,13 +208,13 @@
         }
 
         frameTuple.resetByTupleIndex(this, tupleIndex);
-        tupleSize = tupleWriter.bytesRequired(frameTuple, suffixFieldStart, frameTuple.getFieldCount()
-                - suffixFieldStart);
+        tupleSize =
+                tupleWriter.bytesRequired(frameTuple, suffixFieldStart, frameTuple.getFieldCount() - suffixFieldStart);
 
-        buf.putInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET, buf.getInt(
-                ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET) - 1);
-        buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) + tupleSize
-                + slotManager.getSlotSize());
+        buf.putInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET,
+                buf.getInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET) - 1);
+        buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+                buf.getInt(TOTAL_FREE_SPACE_OFFSET) + tupleSize + slotManager.getSlotSize());
     }
 
     @Override
@@ -245,8 +242,8 @@
             int prefixSlot = buf.getInt(prefixSlotOff);
             int numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
 
-            int compressedSize = tupleWriter.bytesRequired(tuple, numPrefixFields, tuple.getFieldCount()
-                    - numPrefixFields);
+            int compressedSize =
+                    tupleWriter.bytesRequired(tuple, numPrefixFields, tuple.getFieldCount() - numPrefixFields);
             if (compressedSize + slotManager.getSlotSize() <= freeContiguous) {
                 return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
             }
@@ -269,15 +266,15 @@
         }
 
         int freeSpace = buf.getInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET);
-        int bytesWritten = tupleWriter.writeTupleFields(tuple, numPrefixFields,
-                tuple.getFieldCount() - numPrefixFields, buf.array(), freeSpace);
+        int bytesWritten = tupleWriter.writeTupleFields(tuple, numPrefixFields, tuple.getFieldCount() - numPrefixFields,
+                buf.array(), freeSpace);
 
-        buf.putInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET, buf.getInt(
-                ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET) + 1);
-        buf.putInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET, buf.getInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET)
-                + bytesWritten);
-        buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) - bytesWritten
-                - slotManager.getSlotSize());
+        buf.putInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET,
+                buf.getInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET) + 1);
+        buf.putInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET,
+                buf.getInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET) + bytesWritten);
+        buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+                buf.getInt(TOTAL_FREE_SPACE_OFFSET) - bytesWritten - slotManager.getSlotSize());
     }
 
     @Override
@@ -380,35 +377,28 @@
     }
 
     @Override
-    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+    public int findInsertTupleIndex(ITupleReference tuple) throws HyracksDataException {
         int slot;
-        try {
-            slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp,
-                    FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS, FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+        slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+
         int tupleIndex = slotManager.decodeSecondSlotField(slot);
         // Error indicator is set if there is an exact match.
         if (tupleIndex == slotManager.getErrorIndicator()) {
-            throw new TreeIndexDuplicateKeyException("Trying to insert duplicate key into leaf node.");
+            throw HyracksDataException.create(ErrorCode.DUPLICATE_KEY);
         }
         return slot;
     }
 
     @Override
-    public int findUpsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+    public int findUpsertTupleIndex(ITupleReference tuple) throws HyracksDataException {
         int slot;
-        try {
-            slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.INCLUSIVE,
-                    FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+        slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.INCLUSIVE,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
         int tupleIndex = slotManager.decodeSecondSlotField(slot);
         // Error indicator is set if there is an exact match.
         if (tupleIndex == slotManager.getErrorIndicator()) {
-            throw new TreeIndexDuplicateKeyException("Trying to insert duplicate key into leaf node.");
+            throw HyracksDataException.create(ErrorCode.DUPLICATE_KEY);
         }
         return slot;
     }
@@ -432,37 +422,27 @@
     }
 
     @Override
-    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException {
+    public int findUpdateTupleIndex(ITupleReference tuple) throws HyracksDataException {
         int slot;
-        try {
-            slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
-                    FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+        slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
         int tupleIndex = slotManager.decodeSecondSlotField(slot);
         // Error indicator is set if there is no exact match.
         if (tupleIndex == slotManager.getErrorIndicator()) {
-            throw new TreeIndexNonExistentKeyException(
-                    "Trying to update a tuple with a nonexistent key in leaf node.");
+            throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
         }
         return slot;
     }
 
     @Override
-    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
+    public int findDeleteTupleIndex(ITupleReference tuple) throws HyracksDataException {
-        int slot;
-        try {
-            slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
-                    FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+        int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
         int tupleIndex = slotManager.decodeSecondSlotField(slot);
         // Error indicator is set if there is no exact match.
         if (tupleIndex == slotManager.getErrorIndicator()) {
-            throw new TreeIndexNonExistentKeyException(
-                    "Trying to delete a tuple with a nonexistent key in leaf node.");
+            throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
         }
         return slot;
     }
@@ -579,8 +559,8 @@
             }
         }
 
-        int bytesWritten = tupleWriter.writeTupleFields(tuple, fieldsToTruncate, tuple.getFieldCount()
-                - fieldsToTruncate, buf.array(), freeSpace);
+        int bytesWritten = tupleWriter.writeTupleFields(tuple, fieldsToTruncate,
+                tuple.getFieldCount() - fieldsToTruncate, buf.array(), freeSpace);
 
         // insert slot
         int prefixSlotNum = FieldPrefixSlotManager.TUPLE_UNCOMPRESSED;
@@ -593,18 +573,17 @@
         slotManager.insertSlot(insSlot, freeSpace);
 
         // update page metadata
-        buf.putInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET, buf.getInt(
-                ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET) + 1);
-        buf.putInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET, buf.getInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET)
-                + bytesWritten);
-        buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) - bytesWritten
-                - slotManager.getSlotSize());
+        buf.putInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET,
+                buf.getInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET) + 1);
+        buf.putInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET,
+                buf.getInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET) + bytesWritten);
+        buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+                buf.getInt(TOTAL_FREE_SPACE_OFFSET) - bytesWritten - slotManager.getSlotSize());
     }
 
     @Override
     public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey,
-                      IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache)
-            throws HyracksDataException {
+            IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache) throws HyracksDataException {
 
         BTreeFieldPrefixNSMLeafFrame rf = (BTreeFieldPrefixNSMLeafFrame) rightFrame;
 
@@ -669,16 +648,16 @@
                     int bytesWritten = 0;
                     if (lastPrefixSlotNum != prefixSlotNum) {
                         bytesWritten = tupleWriter.writeTuple(framePrefixTuple, right.array(), freeSpace);
-                        int newPrefixSlot = rf.slotManager
-                                .encodeSlotFields(framePrefixTuple.getFieldCount(), freeSpace);
+                        int newPrefixSlot =
+                                rf.slotManager.encodeSlotFields(framePrefixTuple.getFieldCount(), freeSpace);
                         int prefixSlotOff = rf.slotManager.getPrefixSlotOff(prefixSlotNum);
                         right.putInt(prefixSlotOff, newPrefixSlot);
                         lastPrefixSlotNum = prefixSlotNum;
                     }
 
                     int tupleOff = rf.slotManager.decodeSecondSlotField(tupleSlot);
-                    int newTupleSlot = rf.slotManager.encodeSlotFields(prefixSlotNum
-                            - (prefixTupleCount - prefixesToRight), tupleOff);
+                    int newTupleSlot = rf.slotManager
+                            .encodeSlotFields(prefixSlotNum - (prefixTupleCount - prefixesToRight), tupleOff);
                     right.putInt(tupleSlotOff, newTupleSlot);
                     freeSpace += bytesWritten;
                 }
@@ -687,8 +666,8 @@
 
         // move the modified prefix slots on the right page
         int prefixSrc = rf.slotManager.getPrefixSlotEndOff();
-        int prefixDest = rf.slotManager.getPrefixSlotEndOff() + (prefixTupleCount - prefixesToRight)
-                * rf.slotManager.getSlotSize();
+        int prefixDest = rf.slotManager.getPrefixSlotEndOff()
+                + (prefixTupleCount - prefixesToRight) * rf.slotManager.getSlotSize();
         int prefixLength = rf.slotManager.getSlotSize() * prefixesToRight;
         System.arraycopy(right.array(), prefixSrc, right.array(), prefixDest, prefixLength);
 
@@ -719,11 +698,7 @@
         // insert last key
-        int targetTupleIndex;
-        // it's safe to catch this exception since it will have been caught before reaching here
-        try {
-            targetTupleIndex = ((IBTreeLeafFrame) targetFrame).findInsertTupleIndex(tuple);
-        } catch (TreeIndexException e) {
-            throw new IllegalStateException(e);
-        }
+        // a duplicate key would have been rejected before the split, so this lookup cannot fail here
+        int targetTupleIndex = ((IBTreeLeafFrame) targetFrame).findInsertTupleIndex(tuple);
         targetFrame.insert(tuple, targetTupleIndex);
 
         // set split key to be highest value in left page
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
index e59523c..6264086 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
@@ -33,7 +33,6 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
 import org.apache.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
 import org.apache.hyracks.storage.am.common.ophelpers.FindTupleMode;
@@ -71,13 +70,9 @@
     }
 
     @Override
-    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
-        try {
-            return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
-                    FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+    public int findInsertTupleIndex(ITupleReference tuple) throws HyracksDataException {
+        return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
     }
 
     @Override
@@ -107,8 +102,8 @@
         int tupleSize = bytesWritten + CHILD_PTR_SIZE;
         buf.putInt(Constants.TUPLE_COUNT_OFFSET, buf.getInt(Constants.TUPLE_COUNT_OFFSET) + 1);
         buf.putInt(Constants.FREE_SPACE_OFFSET, buf.getInt(Constants.FREE_SPACE_OFFSET) + tupleSize);
-        buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) - tupleSize - slotManager
-                .getSlotSize());
+        buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+                buf.getInt(TOTAL_FREE_SPACE_OFFSET) - tupleSize - slotManager.getSlotSize());
         // Did we insert into the rightmost slot?
         if (slotOff == slotManager.getSlotEndOff()) {
             System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getLeftChildPageOff(tuple) + CHILD_PTR_SIZE,
@@ -127,13 +122,9 @@
     }
 
     @Override
-    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
-        try {
-            return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
-                    FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+    public int findDeleteTupleIndex(ITupleReference tuple) throws HyracksDataException {
+        return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
     }
 
     @Override
@@ -194,17 +185,15 @@
         int tupleSize = bytesWritten + CHILD_PTR_SIZE;
         buf.putInt(Constants.TUPLE_COUNT_OFFSET, buf.getInt(Constants.TUPLE_COUNT_OFFSET) + 1);
         buf.putInt(Constants.FREE_SPACE_OFFSET, buf.getInt(Constants.FREE_SPACE_OFFSET) + tupleSize);
-        buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) - tupleSize - slotManager
-                .getSlotSize());
+        buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+                buf.getInt(TOTAL_FREE_SPACE_OFFSET) - tupleSize - slotManager.getSlotSize());
         System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple) + CHILD_PTR_SIZE, buf.array(),
-                RIGHT_LEAF_OFFSET,
-                CHILD_PTR_SIZE);
+                RIGHT_LEAF_OFFSET, CHILD_PTR_SIZE);
     }
 
     @Override
     public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey,
-                      IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache)
-                    throws HyracksDataException, TreeIndexException {
+            IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache) throws HyracksDataException {
         ByteBuffer right = rightFrame.getBuffer();
         int tupleCount = getTupleCount();
 
@@ -276,11 +265,7 @@
         // Insert the saved split key.
-        int targetTupleIndex;
-        // it's safe to catch this exception since it will have been caught before reaching here
-        try {
-            targetTupleIndex = ((BTreeNSMInteriorFrame) targetFrame).findInsertTupleIndex(savedSplitKey.getTuple());
-        } catch (TreeIndexException e) {
-            throw new IllegalStateException(e);
-        }
+        // the split key was validated before the split, so this lookup cannot fail here
+        int targetTupleIndex = ((BTreeNSMInteriorFrame) targetFrame).findInsertTupleIndex(savedSplitKey.getTuple());
         targetFrame.insert(savedSplitKey.getTuple(), targetTupleIndex);
     }
 
@@ -338,8 +323,8 @@
             fsm = FindTupleMode.EXCLUSIVE;
         }
         // Search for a matching key.
-        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, targetCmp, fsm,
-                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        int tupleIndex =
+                slotManager.findTupleIndex(tuple, frameTuple, targetCmp, fsm, FindTupleNoExactMatchPolicy.HIGHER_KEY);
         int slotOff = slotManager.getSlotOff(tupleIndex);
         // Follow the rightmost (greatest) child pointer.
         if (tupleIndex == slotManager.getGreatestKeyIndicator()) {
@@ -423,8 +408,8 @@
         for (int i = 0; i < tupleCount; i++) {
             int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
             frameTuple.resetByTupleOffset(buf.array(), tupleOff);
-            int intVal = IntegerPointable.getInteger(buf.array(),
-                    frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+            int intVal =
+                    IntegerPointable.getInteger(buf.array(), frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
                             + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
             ret.add(intVal);
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
index 6a957b5..e070b51 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
@@ -21,6 +21,7 @@
 
 import java.nio.ByteBuffer;
 
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.btree.api.IBTreeLeafFrame;
@@ -29,9 +30,6 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
 import org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
 import org.apache.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
 import org.apache.hyracks.storage.am.common.ophelpers.FindTupleMode;
@@ -79,46 +77,34 @@
     }
 
     @Override
-    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+    public int findInsertTupleIndex(ITupleReference tuple) throws HyracksDataException {
-        int tupleIndex;
-        try {
-            tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS,
-                    FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp,
+                FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS, FindTupleNoExactMatchPolicy.HIGHER_KEY);
         // Error indicator is set if there is an exact match.
         if (tupleIndex == slotManager.getErrorIndicator()) {
-            throw new TreeIndexDuplicateKeyException("Trying to insert duplicate key into leaf node.");
+            throw HyracksDataException.create(ErrorCode.DUPLICATE_KEY);
         }
         return tupleIndex;
     }
 
     @Override
-    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException {
+    public int findUpdateTupleIndex(ITupleReference tuple) throws HyracksDataException {
-        int tupleIndex;
-        try {
-            tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
-                    FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
         // Error indicator is set if there is no exact match.
         if (tupleIndex == slotManager.getErrorIndicator() || tupleIndex == slotManager.getGreatestKeyIndicator()) {
-            throw new TreeIndexNonExistentKeyException("Trying to update a tuple with a nonexistent key in leaf node.");
+            throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
         }
         return tupleIndex;
     }
 
     @Override
-    public int findUpsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+    public int findUpsertTupleIndex(ITupleReference tuple) throws HyracksDataException {
-        int tupleIndex;
-        try {
-            tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
-                    FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
         // Just return the found tupleIndex. The caller will make the final
         // decision whether to insert or update.
         return tupleIndex;
@@ -144,17 +130,13 @@
     }
 
     @Override
-    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
+    public int findDeleteTupleIndex(ITupleReference tuple) throws HyracksDataException {
-        int tupleIndex;
-        try {
-            tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
-                    FindTupleNoExactMatchPolicy.HIGHER_KEY);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
         // Error indicator is set if there is no exact match.
         if (tupleIndex == slotManager.getErrorIndicator() || tupleIndex == slotManager.getGreatestKeyIndicator()) {
-            throw new TreeIndexNonExistentKeyException("Trying to delete a tuple with a nonexistent key in leaf node.");
+            throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
         }
         return tupleIndex;
     }
@@ -166,8 +148,8 @@
         int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), freeSpace);
         buf.putInt(Constants.TUPLE_COUNT_OFFSET, buf.getInt(Constants.TUPLE_COUNT_OFFSET) + 1);
         buf.putInt(Constants.FREE_SPACE_OFFSET, buf.getInt(Constants.FREE_SPACE_OFFSET) + bytesWritten);
-        buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) - bytesWritten - slotManager
-                .getSlotSize());
+        buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+                buf.getInt(TOTAL_FREE_SPACE_OFFSET) - bytesWritten - slotManager.getSlotSize());
     }
 
     @Override
@@ -207,8 +189,7 @@
 
     @Override
     public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey,
-                      IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache)
-            throws HyracksDataException {
+            IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache) throws HyracksDataException {
 
         int tupleSize = getBytesRequiredToWriteTuple(tuple);
 
@@ -280,11 +261,7 @@
-        int targetTupleIndex;
-        // it's safe to catch this exception since it will have been caught
-        // before reaching here
-        try {
-            targetTupleIndex = targetFrame.findInsertTupleIndex(tuple);
-        } catch (TreeIndexException e) {
-            throw new IllegalStateException(e);
-        }
+        // a duplicate key would have been rejected before the split, so this lookup cannot fail here
+        int targetTupleIndex = targetFrame.findInsertTupleIndex(tuple);
         targetFrame.insert(tuple, targetTupleIndex);
 
         // Set the split key to be highest key in the left page.
@@ -298,7 +275,7 @@
 
     @Override
     public void ensureCapacity(IBufferCache bufferCache, ITupleReference tuple,
-                               IExtraPageBlockHelper extraPageBlockHelper) throws HyracksDataException {
+            IExtraPageBlockHelper extraPageBlockHelper) throws HyracksDataException {
-        // we call ensureCapacity() for large tuples- ensure large flag is set
+        // we call ensureCapacity() for large tuples; ensure the large flag is set
         setLargeFlag(true);
         int gapBytes = getBytesRequiredToWriteTuple(tuple) - getFreeContiguousSpace();
@@ -308,8 +285,8 @@
         }
     }
 
-    private void growCapacity(IExtraPageBlockHelper extraPageBlockHelper,
-            IBufferCache bufferCache, int deltaPages) throws HyracksDataException {
+    private void growCapacity(IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache, int deltaPages)
+            throws HyracksDataException {
         int framePagesOld = page.getFrameSizeMultiplier();
         int newMultiplier = framePagesOld + deltaPages;
 
@@ -325,8 +302,8 @@
         System.arraycopy(buf.array(), oldSlotEnd, buf.array(), slotManager.getSlotEndOff(), oldSlotStart - oldSlotEnd);
 
         // fixup total free space counter
-        buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) + (bufferCache.getPageSize()
-                * deltaPages));
+        buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+                buf.getInt(TOTAL_FREE_SPACE_OFFSET) + (bufferCache.getPageSize() * deltaPages));
     }
 
     @Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTree.java
index 653689a..ba84031 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTree.java
@@ -27,6 +27,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
@@ -36,8 +37,6 @@
 import org.apache.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
 import org.apache.hyracks.storage.am.btree.api.IBTreeLeafFrame;
 import org.apache.hyracks.storage.am.btree.api.ITupleAcceptor;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
 import org.apache.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrame;
 import org.apache.hyracks.storage.am.btree.impls.BTreeOpContext.PageValidationInfo;
 import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
@@ -53,11 +52,6 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.api.UnsortedInputException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
 import org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
 import org.apache.hyracks.storage.am.common.impls.AbstractTreeIndex;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
@@ -75,9 +69,9 @@
 
     public static final float DEFAULT_FILL_FACTOR = 0.7f;
 
-    private final static long RESTART_OP = Long.MIN_VALUE;
-    private final static long FULL_RESTART_OP = Long.MIN_VALUE + 1;
-    private final static int MAX_RESTARTS = 10;
+    private static final long RESTART_OP = Long.MIN_VALUE;
+    private static final long FULL_RESTART_OP = Long.MIN_VALUE + 1;
+    private static final int MAX_RESTARTS = 10;
 
     private final AtomicInteger smoCounter;
     private final ReadWriteLock treeLatch;
@@ -116,7 +110,7 @@
         } catch (Exception e) {
             page.releaseReadLatch();
             bufferCache.unpin(page);
-            throw new HyracksDataException(e);
+            throw HyracksDataException.create(e);
         }
     }
 
@@ -125,8 +119,8 @@
         // Stack validation protocol:
         //      * parent pushes the validation information onto the stack before validation
         //      * child pops the validation information off of the stack after validating
-        BTreeAccessor accessor = (BTreeAccessor) createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
+        BTreeAccessor accessor =
+                (BTreeAccessor) createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         PageValidationInfo pvi = accessor.ctx.createPageValidationInfo(null);
         accessor.ctx.validationInfos.addFirst(pvi);
         if (isActive) {
@@ -187,7 +181,7 @@
     }
 
     private void search(ITreeIndexCursor cursor, ISearchPredicate searchPred, BTreeOpContext ctx)
-            throws TreeIndexException, HyracksDataException {
+            throws HyracksDataException {
         ctx.reset();
         ctx.pred = (RangePredicate) searchPred;
         ctx.cursor = cursor;
@@ -241,10 +235,10 @@
         ctx.interiorFrame.setPage(originalPage);
     }
 
-    private void createNewRoot(BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+    private void createNewRoot(BTreeOpContext ctx) throws HyracksDataException {
         // Make sure the root is always in the same page.
-        ICachedPage leftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getLeftPage()),
-                false);
+        ICachedPage leftNode =
+                bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getLeftPage()), false);
         leftNode.acquireWriteLatch();
         try {
             int newLeftId = freePageManager.takePage(ctx.metaFrame);
@@ -282,8 +276,7 @@
                 int targetTupleIndex = ctx.interiorFrame.findInsertTupleIndex(ctx.splitKey.getTuple());
                 int tupleSize = ctx.interiorFrame.getBytesRequiredToWriteTuple(ctx.splitKey.getTuple());
                 if (tupleSize > maxTupleSize) {
-                    throw new TreeIndexException("Space required for record (" + tupleSize
-                            + ") larger than maximum acceptable size (" + maxTupleSize + ")");
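+                    // tupleSize and maxTupleSize become parameters of the RECORD_IS_TOO_LARGE message template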
+                    throw HyracksDataException.create(ErrorCode.RECORD_IS_TOO_LARGE, tupleSize, maxTupleSize);
                 }
                 ctx.interiorFrame.insert(ctx.splitKey.getTuple(), targetTupleIndex);
             } finally {
@@ -296,72 +289,6 @@
         }
     }
 
-    private void insertUpdateOrDelete(ITupleReference tuple, BTreeOpContext ctx)
-            throws HyracksDataException, TreeIndexException {
-        ctx.reset();
-        ctx.pred.setLowKeyComparator(ctx.cmp);
-        ctx.pred.setHighKeyComparator(ctx.cmp);
-        ctx.pred.setLowKey(tuple, true);
-        ctx.pred.setHighKey(tuple, true);
-        ctx.splitKey.reset();
-        ctx.splitKey.getTuple().setFieldCount(ctx.cmp.getKeyFieldCount());
-        // We use this loop to deal with possibly multiple operation restarts
-        // due to ongoing structure modifications during the descent.
-        boolean repeatOp = true;
-        while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
-            ctx.smoCount = smoCounter.get();
-            performOp(rootPage, null, true, ctx);
-            // Do we need to restart from the (possibly new) root?
-            if (!ctx.pageLsns.isEmpty()) {
-                if (ctx.pageLsns.getLast() == FULL_RESTART_OP) {
-                    ctx.pageLsns.clear();
-                    continue;
-                } else if (ctx.pageLsns.getLast() == RESTART_OP) {
-                    ctx.pageLsns.removeLast(); // pop the restart op indicator
-                    continue;
-                }
-
-            }
-            // Split key propagated?
-            if (ctx.splitKey.getBuffer() != null) {
-                // Insert or update op. Create a new root.
-                createNewRoot(ctx);
-            }
-            unsetSmPages(ctx);
-            repeatOp = false;
-        }
-
-        if (ctx.opRestarts >= MAX_RESTARTS) {
-            throw new BTreeException("Operation exceeded the maximum number of restarts");
-        }
-    }
-
-    private void insert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
-        ctx.modificationCallback.before(tuple);
-        insertUpdateOrDelete(tuple, ctx);
-    }
-
-    private void upsert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
-        ctx.modificationCallback.before(tuple);
-        insertUpdateOrDelete(tuple, ctx);
-    }
-
-    private void update(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
-        // This call only allows updating of non-key fields.
-        // Updating a tuple's key necessitates deleting the old entry, and inserting the new entry.
-        // The user of the BTree is responsible for dealing with non-key updates (i.e., doing a delete + insert).
-        if (fieldCount == ctx.cmp.getKeyFieldCount()) {
-            throw new BTreeNotUpdateableException("Cannot perform updates when the entire tuple forms the key.");
-        }
-        ctx.modificationCallback.before(tuple);
-        insertUpdateOrDelete(tuple, ctx);
-    }
-
-    private void delete(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
-        ctx.modificationCallback.before(tuple);
-        insertUpdateOrDelete(tuple, ctx);
-    }
-
     private boolean insertLeaf(ITupleReference tuple, int targetTupleIndex, int pageId, BTreeOpContext ctx)
             throws Exception {
         boolean restartOp = false;
@@ -590,8 +517,7 @@
 
             case TOO_LARGE: {
                 int tupleSize = ctx.interiorFrame.getBytesRequiredToWriteTuple(tuple);
-                throw new TreeIndexException("Space required for record (" + tupleSize
-                        + ") larger than maximum acceptable size (" + maxTupleSize + ")");
+                throw HyracksDataException.create(ErrorCode.RECORD_IS_TOO_LARGE, tupleSize, maxTupleSize);
             }
 
             default: {
@@ -606,7 +532,7 @@
         // This means that there could be underflow, even an empty page that is
         // pointed to by an interior node.
         if (ctx.leafFrame.getTupleCount() == 0) {
-            throw new TreeIndexNonExistentKeyException("Trying to delete a tuple with a nonexistent key in leaf node.");
+            throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
         }
         int tupleIndex = ctx.leafFrame.findDeleteTupleIndex(tuple);
         ITupleReference beforeTuple = ctx.leafFrame.getMatchingKeyTuple(tuple, tupleIndex);
@@ -639,7 +565,7 @@
     }
 
     private void performOp(int pageId, ICachedPage parent, boolean parentIsReadLatched, BTreeOpContext ctx)
-            throws HyracksDataException, TreeIndexException {
+            throws HyracksDataException {
         ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
         ctx.interiorFrame.setPage(node);
         // this check performs an unprotected read in the page
@@ -701,8 +627,8 @@
                             case UPDATE: {
                                 // Is there a propagated split key?
                                 if (ctx.splitKey.getBuffer() != null) {
-                                    ICachedPage interiorNode = bufferCache.pin(
-                                            BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+                                    ICachedPage interiorNode =
+                                            bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
                                     interiorNode.acquireWriteLatch();
                                     try {
                                         // Insert or update op. Both can cause split keys to propagate upwards.
@@ -719,7 +645,7 @@
 
                             case DELETE: {
                                 if (ctx.splitKey.getBuffer() != null) {
-                                    throw new BTreeException(
+                                    throw new HyracksDataException(
                                             "Split key was propagated during delete. Delete allows empty leaf pages.");
                                 }
                                 break;
@@ -802,7 +728,7 @@
                     ctx.pageLsns.add(FULL_RESTART_OP);
                 }
             }
-        } catch (TreeIndexException e) {
+        } catch (HyracksDataException e) {
             if (!ctx.exceptionHandled) {
                 if (node != null) {
                     if (isReadLatched) {
@@ -824,7 +750,7 @@
                 }
                 bufferCache.unpin(node);
             }
-            BTreeException wrappedException = new BTreeException(e);
+            HyracksDataException wrappedException = HyracksDataException.create(e);
             ctx.exceptionHandled = true;
             throw wrappedException;
         }
@@ -832,8 +758,8 @@
 
     private BTreeOpContext createOpContext(IIndexAccessor accessor, IModificationOperationCallback modificationCallback,
             ISearchOperationCallback searchCallback) {
-        return new BTreeOpContext(accessor, leafFrameFactory, interiorFrameFactory,
-                freePageManager, cmpFactories, modificationCallback, searchCallback);
+        return new BTreeOpContext(accessor, leafFrameFactory, interiorFrameFactory, freePageManager, cmpFactories,
+                modificationCallback, searchCallback);
     }
 
     @SuppressWarnings("rawtypes")
@@ -923,33 +849,33 @@
         }
 
         @Override
-        public void insert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+        public void insert(ITupleReference tuple) throws HyracksDataException {
             ctx.setOperation(IndexOperation.INSERT);
-            btree.insert(tuple, ctx);
+            insert(tuple, ctx);
         }
 
         @Override
-        public void update(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+        public void update(ITupleReference tuple) throws HyracksDataException {
             ctx.setOperation(IndexOperation.UPDATE);
-            btree.update(tuple, ctx);
+            update(tuple, ctx);
         }
 
         @Override
-        public void delete(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+        public void delete(ITupleReference tuple) throws HyracksDataException {
             ctx.setOperation(IndexOperation.DELETE);
-            btree.delete(tuple, ctx);
+            delete(tuple, ctx);
         }
 
         @Override
-        public void upsert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+        public void upsert(ITupleReference tuple) throws HyracksDataException {
             upsertIfConditionElseInsert(tuple, UnconditionalTupleAcceptor.INSTANCE);
         }
 
         public void upsertIfConditionElseInsert(ITupleReference tuple, ITupleAcceptor acceptor)
-                throws HyracksDataException, TreeIndexException {
+                throws HyracksDataException {
             ctx.setOperation(IndexOperation.UPSERT);
             ctx.acceptor = acceptor;
-            btree.upsert(tuple, ctx);
+            upsert(tuple, ctx);
         }
 
         @Override
@@ -959,8 +885,7 @@
         }
 
         @Override
-        public void search(IIndexCursor cursor, ISearchPredicate searchPred)
-                throws HyracksDataException, TreeIndexException {
+        public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
             ctx.setOperation(IndexOperation.SEARCH);
             btree.search((ITreeIndexCursor) cursor, searchPred, ctx);
         }
@@ -988,24 +913,84 @@
             IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
             return new BTreeCountingSearchCursor(leafFrame, false);
         }
+
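+        // The private helpers below were moved here from the enclosing BTree class so that each
+        // modification operation runs against this accessor's BTreeOpContext.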
+        private void insert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException {
+            ctx.modificationCallback.before(tuple);
+            insertUpdateOrDelete(tuple, ctx);
+        }
+
+        private void upsert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException {
+            ctx.modificationCallback.before(tuple);
+            insertUpdateOrDelete(tuple, ctx);
+        }
+
+        private void update(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException {
+            // This call only allows updating of non-key fields.
+            // Updating a tuple's key necessitates deleting the old entry, and inserting the new entry.
+            // The user of the BTree is responsible for dealing with key updates (i.e., doing a delete + insert).
+            if (fieldCount == ctx.cmp.getKeyFieldCount()) {
+                throw HyracksDataException.create(ErrorCode.INDEX_NOT_UPDATABLE);
+            }
+            ctx.modificationCallback.before(tuple);
+            insertUpdateOrDelete(tuple, ctx);
+        }
+
+        private void delete(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException {
+            ctx.modificationCallback.before(tuple);
+            insertUpdateOrDelete(tuple, ctx);
+        }
+
+        private void insertUpdateOrDelete(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException {
+            ctx.reset();
+            ctx.pred.setLowKeyComparator(ctx.cmp);
+            ctx.pred.setHighKeyComparator(ctx.cmp);
+            ctx.pred.setLowKey(tuple, true);
+            ctx.pred.setHighKey(tuple, true);
+            ctx.splitKey.reset();
+            ctx.splitKey.getTuple().setFieldCount(ctx.cmp.getKeyFieldCount());
+            // We use this loop to deal with possibly multiple operation restarts
+            // due to ongoing structure modifications during the descent.
+            boolean repeatOp = true;
+            while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
+                ctx.smoCount = smoCounter.get();
+                performOp(rootPage, null, true, ctx);
+                // Do we need to restart from the (possibly new) root?
+                if (!ctx.pageLsns.isEmpty()) {
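+                    // FULL_RESTART_OP discards the entire saved page LSN stack before retrying,
+                    // while RESTART_OP pops only the indicator and keeps the rest of the stack.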
+                    if (ctx.pageLsns.getLast() == FULL_RESTART_OP) {
+                        ctx.pageLsns.clear();
+                        continue;
+                    } else if (ctx.pageLsns.getLast() == RESTART_OP) {
+                        ctx.pageLsns.removeLast(); // pop the restart op indicator
+                        continue;
+                    }
+                }
+                // Split key propagated?
+                if (ctx.splitKey.getBuffer() != null) {
+                    // Insert or update op. Create a new root.
+                    createNewRoot(ctx);
+                }
+                unsetSmPages(ctx);
+                repeatOp = false;
+            }
+
+            if (ctx.opRestarts >= MAX_RESTARTS) {
+                throw HyracksDataException.create(ErrorCode.OPERATION_EXCEEDED_MAX_RESTARTS, MAX_RESTARTS);
+            }
+        }
     }
 
     @Override
     public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws TreeIndexException {
-        try {
-            return new BTreeBulkLoader(fillFactor, verifyInput);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+            boolean checkIfEmptyIndex) throws HyracksDataException {
+        return new BTreeBulkLoader(fillFactor, verifyInput);
     }
 
     public class BTreeBulkLoader extends AbstractTreeIndex.AbstractTreeIndexBulkLoader {
         protected final ISplitKey splitKey;
         protected final boolean verifyInput;
 
-        public BTreeBulkLoader(float fillFactor, boolean verifyInput)
-                throws TreeIndexException, HyracksDataException {
+        public BTreeBulkLoader(float fillFactor, boolean verifyInput) throws HyracksDataException {
             super(fillFactor);
             this.verifyInput = verifyInput;
             splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
@@ -1013,7 +998,7 @@
         }
 
         @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+        public void add(ITupleReference tuple) throws HyracksDataException {
             try {
                 int tupleSize = Math.max(leafFrame.getBytesRequiredToWriteTuple(tuple),
                         interiorFrame.getBytesRequiredToWriteTuple(tuple));
@@ -1062,8 +1047,8 @@
                         final long dpid = BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId);
                         // calculate required number of pages.
                         int headerSize = Math.max(leafFrame.getPageHeaderSize(), interiorFrame.getPageHeaderSize());
-                        final int multiplier = (int) Math
-                                .ceil((double) tupleSize / (bufferCache.getPageSize() - headerSize));
+                        final int multiplier =
+                                (int) Math.ceil((double) tupleSize / (bufferCache.getPageSize() - headerSize));
                         if (multiplier > 1) {
                             leafFrontier.page = bufferCache.confiscateLargePage(dpid, multiplier,
                                     freePageManager.takeBlock(metaFrame, multiplier - 1));
@@ -1086,26 +1071,24 @@
                     }
                 }
                 ((IBTreeLeafFrame) leafFrame).insertSorted(tuple);
-            } catch (IndexException | HyracksDataException | RuntimeException e) {
+            } catch (HyracksDataException | RuntimeException e) {
                 handleException();
                 throw e;
             }
         }
 
-        protected void verifyInputTuple(ITupleReference tuple, ITupleReference prevTuple)
-                throws IndexException, HyracksDataException {
+        protected void verifyInputTuple(ITupleReference tuple, ITupleReference prevTuple) throws HyracksDataException {
             // New tuple should be strictly greater than last tuple.
             int cmpResult = cmp.compare(tuple, prevTuple);
             if (cmpResult < 0) {
-                throw new UnsortedInputException("Input stream given to BTree bulk load is not sorted.");
+                throw HyracksDataException.create(ErrorCode.UNSORTED_LOAD_INPUT);
             }
             if (cmpResult == 0) {
-                throw new TreeIndexDuplicateKeyException("Input stream given to BTree bulk load has duplicates.");
+                throw HyracksDataException.create(ErrorCode.DUPLICATE_LOAD_INPUT);
             }
         }
 
-        protected void propagateBulk(int level, List<ICachedPage> pagesToWrite)
-                throws HyracksDataException, TreeIndexException {
+        protected void propagateBulk(int level, List<ICachedPage> pagesToWrite) throws HyracksDataException {
             if (splitKey.getBuffer() == null) {
                 return;
             }
@@ -1121,9 +1104,8 @@
             int tupleBytes = tupleWriter.bytesRequired(tuple, 0, cmp.getKeyFieldCount());
             int spaceNeeded = tupleBytes + slotSize + 4;
             if (tupleBytes > interiorFrame.getMaxTupleSize(BTree.this.bufferCache.getPageSize())) {
-                throw new TreeIndexException(
-                        "Space required for record (" + tupleBytes + ") larger than maximum acceptable size ("
-                                + interiorFrame.getMaxTupleSize(BTree.this.bufferCache.getPageSize()) + ")");
+                throw HyracksDataException.create(ErrorCode.RECORD_IS_TOO_LARGE, tupleBytes,
+                        interiorFrame.getMaxTupleSize(BTree.this.bufferCache.getPageSize()));
             }
 
             int spaceUsed = interiorFrame.getBuffer().capacity() - interiorFrame.getTotalFreeSpace();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeCountingSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeCountingSearchCursor.java
index 2297cc5..a02b492 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeCountingSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeCountingSearchCursor.java
@@ -78,12 +78,7 @@
     public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
         // in case open is called multiple times without closing
         if (page != null) {
-            if (exclusiveLatchNodes) {
-                page.releaseWriteLatch(isPageDirty);
-            } else {
-                page.releaseReadLatch();
-            }
-            bufferCache.unpin(page);
+            releasePage();
         }
 
         page = ((BTreeCursorInitialState) initialState).getPage();
@@ -116,6 +111,15 @@
         stopTupleIndex = getHighKeyIndex();
     }
 
+    private void releasePage() throws HyracksDataException {
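+        // release the page latch according to the latching mode, then unpin the page;
+        // shared by open() (when reopened without a close) and close()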
+        if (exclusiveLatchNodes) {
+            page.releaseWriteLatch(isPageDirty);
+        } else {
+            page.releaseReadLatch();
+        }
+        bufferCache.unpin(page);
+    }
+
     private void fetchNextLeafPage(int nextLeafPage) throws HyracksDataException {
         do {
             ICachedPage nextLeaf = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, nextLeafPage), false);
@@ -205,12 +209,7 @@
     @Override
     public void close() throws HyracksDataException {
         if (page != null) {
-            if (exclusiveLatchNodes) {
-                page.releaseWriteLatch(isPageDirty);
-            } else {
-                page.releaseReadLatch();
-            }
-            bufferCache.unpin(page);
+            releasePage();
         }
         tupleBuilder.reset();
         tupleIndex = 0;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
index b4fdd9e..5452604 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
@@ -31,7 +31,6 @@
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.FindTupleMode;
 import org.apache.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -98,6 +97,7 @@
         pred = null;
     }
 
+    @Override
     public ITupleReference getTuple() {
         return frameTuple;
     }
@@ -183,11 +183,7 @@
 
                 // retraverse the index looking for the reconciled key
                 reusablePredicate.setLowKey(reconciliationTuple, true);
-                try {
-                    accessor.search(this, reusablePredicate);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
+                accessor.search(this, reusablePredicate);
 
                 if (stopTupleIndex < 0 || tupleIndex > stopTupleIndex) {
                     return false;
@@ -319,7 +315,7 @@
         }
     }
 
-    public boolean isBloomFilterAware(){
+    public boolean isBloomFilterAware() {
         return false;
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/RangePredicate.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/RangePredicate.java
index f1a411b..5b7fa42 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/RangePredicate.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/RangePredicate.java
@@ -19,6 +19,7 @@
 
 package org.apache.hyracks.storage.am.btree.impls;
 
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.impls.AbstractSearchPredicate;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -130,4 +131,27 @@
     public void setHighKeyCmp(MultiComparator highKeyCmp) {
         this.highKeyCmp = highKeyCmp;
     }
+
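+    /**
+     * A range predicate is a point predicate when it can match at most one key: both bounds are
+     * present and inclusive, both bound comparators cover the full key, and the low key equals
+     * the high key under the original key comparator.
+     */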
+    public boolean isPointPredicate(MultiComparator originalKeyComparator) throws HyracksDataException {
+        if (getLowKey() == null) {
+            return false;
+        }
+        if (getHighKey() == null) {
+            return false;
+        }
+        if (!isLowKeyInclusive()) {
+            return false;
+        }
+        if (!isHighKeyInclusive()) {
+            return false;
+        }
+        if (getLowKeyComparator().getKeyFieldCount() != getHighKeyComparator().getKeyFieldCount()) {
+            return false;
+        }
+        if (getLowKeyComparator().getKeyFieldCount() != originalKeyComparator.getKeyFieldCount()) {
+            return false;
+        }
+        return originalKeyComparator.compare(getLowKey(), getHighKey()) == 0;
+    }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/test/java/org/apache/hyracks/storage/am/btree/test/FramewriterTest.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/test/java/org/apache/hyracks/storage/am/btree/test/FramewriterTest.java
index d713a92..75b2fdd 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/test/java/org/apache/hyracks/storage/am/btree/test/FramewriterTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/test/java/org/apache/hyracks/storage/am/btree/test/FramewriterTest.java
@@ -48,7 +48,6 @@
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable;
@@ -273,8 +272,8 @@
      * @throws HyracksDataException
-     * @throws IndexException
      */
-    public IFrameWriter[] createWriters() throws HyracksDataException, IndexException {
-        ArrayList<BTreeSearchOperatorNodePushable> writers = new ArrayList<BTreeSearchOperatorNodePushable>();
+    public IFrameWriter[] createWriters() throws HyracksDataException {
+        ArrayList<BTreeSearchOperatorNodePushable> writers = new ArrayList<>();
         AbstractTreeIndexOperatorDescriptor[] opDescs = mockIndexOpDesc();
         IRecordDescriptorProvider[] recordDescProviders = mockRecDescProviders();
         int partition = 0;
@@ -320,7 +319,7 @@
         return rDesc;
     }
 
-    public ITreeIndex[] mockIndexes() throws HyracksDataException, IndexException {
+    public ITreeIndex[] mockIndexes() throws HyracksDataException {
         IIndexAccessor[] indexAccessors = mockIndexAccessors();
         ITreeIndex[] indexes = new ITreeIndex[indexAccessors.length * 2];
         int j = 0;
@@ -336,7 +335,7 @@
         return indexes;
     }
 
-    private IIndexAccessor[] mockIndexAccessors() throws HyracksDataException, IndexException {
+    private IIndexAccessor[] mockIndexAccessors() throws HyracksDataException {
         IIndexCursor[] cursors = mockIndexCursors();
         IIndexAccessor[] accessors = new IIndexAccessor[cursors.length * 2];
         int j = 0;
@@ -367,7 +366,7 @@
         return accessors;
     }
 
-    private IIndexCursor[] mockIndexCursors() throws HyracksDataException, IndexException {
+    private IIndexCursor[] mockIndexCursors() throws HyracksDataException {
         ITupleReference[] tuples = mockTuples();
         IIndexCursor[] cursors = new IIndexCursor[tuples.length * 2];
         int j = 0;
@@ -392,7 +391,7 @@
         return new ITupleReference[] { tuple };
     }
 
-    public IIndexDataflowHelper[] mockIndexHelpers() throws HyracksDataException, IndexException {
+    public IIndexDataflowHelper[] mockIndexHelpers() throws HyracksDataException {
         ITreeIndex[] indexes = mockIndexes();
         IIndexDataflowHelper[] indexHelpers = new IIndexDataflowHelper[indexes.length * 2];
         int j = 0;
@@ -412,7 +411,7 @@
         return indexHelpers;
     }
 
-    public IIndexDataflowHelperFactory[] mockIndexHelperFactories() throws HyracksDataException, IndexException {
+    public IIndexDataflowHelperFactory[] mockIndexHelperFactories() throws HyracksDataException {
         IIndexDataflowHelper[] helpers = mockIndexHelpers();
         IIndexDataflowHelperFactory[] indexHelperFactories = new IIndexDataflowHelperFactory[helpers.length];
         for (int i = 0; i < helpers.length; i++) {
@@ -424,7 +423,7 @@
         return indexHelperFactories;
     }
 
-    public AbstractTreeIndexOperatorDescriptor[] mockIndexOpDesc() throws HyracksDataException, IndexException {
+    public AbstractTreeIndexOperatorDescriptor[] mockIndexOpDesc() throws HyracksDataException {
         IIndexDataflowHelperFactory[] indexDataflowHelperFactories = mockIndexHelperFactories();
         ISearchOperationCallbackFactory[] searchOpCallbackFactories = mockSearchOpCallbackFactories();
         AbstractTreeIndexOperatorDescriptor[] opDescs =
@@ -464,7 +463,7 @@
         CountAnswer[] nextFrames = new CountAnswer[] { nextFrameNormal, nextFrameException, nextFrameError };
         CountAnswer[] fails = new CountAnswer[] { failNormal, failException, failError };
         CountAnswer[] closes = new CountAnswer[] { closeNormal, closeException, closeError };
-        List<IFrameWriter> outputWriters = new ArrayList<IFrameWriter>();
+        List<IFrameWriter> outputWriters = new ArrayList<>();
         for (CountAnswer openAnswer : opens) {
             for (CountAnswer nextFrameAnswer : nextFrames) {
                 for (CountAnswer failAnswer : fails) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndex.java
index 884318e..a5fcc6e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndex.java
@@ -125,7 +125,7 @@
-     * @throws IndexException
      */
     public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws IndexException;
+            boolean checkIfEmptyIndex) throws HyracksDataException;
 
     /**
      * @return true if the index needs memory components
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexAccessor.java
index 1903222..d981cd7 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexAccessor.java
@@ -40,7 +40,7 @@
      *             If an index-specific constraint is violated, e.g., the key
      *             already exists.
      */
-    public void insert(ITupleReference tuple) throws HyracksDataException, IndexException;
+    public void insert(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Updates the tuple in the index matching the given tuple with the new
@@ -54,7 +54,7 @@
-     * @throws IndexException
+     * @throws HyracksDataException
      *             If there is no matching tuple in the index.
      */
-    public void update(ITupleReference tuple) throws HyracksDataException, IndexException;
+    public void update(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Deletes the tuple in the index matching the given tuple.
@@ -66,7 +66,7 @@
-     * @throws IndexException
+     * @throws HyracksDataException
      *             If there is no matching tuple in the index.
      */
-    public void delete(ITupleReference tuple) throws HyracksDataException, IndexException;
+    public void delete(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * This operation is only supported by indexes with the notion of a unique key.
@@ -81,7 +81,7 @@
      *             If there is no matching tuple in the index.
      *
      */
-    public void upsert(ITupleReference tuple) throws HyracksDataException, IndexException;
+    public void upsert(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Creates a cursor appropriate for passing into search().
@@ -101,5 +101,4 @@
      *             If the BufferCache throws while un/pinning or un/latching.
-     * @throws IndexException
      */
-    public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException, IndexException;
+    public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException;
 }
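
With IndexException folded into HyracksDataException, IIndexAccessor callers now handle a single checked type. A minimal caller sketch under the revised contract (the wrapper method and tuple source below are illustrative, not part of this patch):

    // Hypothetical helper; assumes the usual org.apache.hyracks imports.
    static void insertAll(IIndexAccessor accessor, Iterable<ITupleReference> tuples)
            throws HyracksDataException {
        for (ITupleReference tuple : tuples) {
            accessor.insert(tuple); // constraint violations arrive as coded HyracksDataExceptions
        }
    }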
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexBulkLoader.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexBulkLoader.java
index f371522..9d3f657 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexBulkLoader.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexBulkLoader.java
@@ -32,7 +32,7 @@
      * @throws HyracksDataException
      *             If the BufferCache throws while un/pinning or un/latching.
      */
-    public void add(ITupleReference tuple) throws IndexException, HyracksDataException;
+    public void add(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Finalize the bulk loading operation in the given context.
@@ -41,7 +41,7 @@
      * @throws HyracksDataException
      *             If the BufferCache throws while un/pinning or un/latching.
      */
-    public void end() throws IndexException, HyracksDataException;
+    public void end() throws HyracksDataException;
 
     /**
      * Release all resources held by this bulkloader, with no guarantee of
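
The narrowed IIndexBulkLoader contract implies the following load lifecycle; a minimal sketch, assuming an index and a pre-sorted tuple source obtained elsewhere (both illustrative):

    IIndexBulkLoader loader = index.createBulkLoader(0.7f, true, tupleCount, true);
    for (ITupleReference tuple : sortedTuples) {
        loader.add(tuple); // unsorted input now surfaces as HyracksDataException, not UnsortedInputException
    }
    loader.end(); // finalizes the load; likewise declares only HyracksDataException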
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexCursor.java
index 17c4b98..2b5f3f6 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexCursor.java
@@ -23,16 +23,48 @@
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 public interface IIndexCursor {
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws IndexException,
-            HyracksDataException;
+    /**
+     * Opens the cursor.
+     * If open succeeds, close must be called.
+     *
+     * @param initialState
+     * @param searchPred
+     * @throws HyracksDataException
+     */
+    void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException;
 
-    public boolean hasNext() throws HyracksDataException, IndexException;
+    /**
+     * Checks whether the cursor has a next value
+     *
+     * @return true if the cursor has a next value, false otherwise
+     * @throws HyracksDataException
+     */
+    boolean hasNext() throws HyracksDataException;
 
-    public void next() throws HyracksDataException;
+    /**
+     * Moves the cursor to the next value
+     *
+     * @throws HyracksDataException
+     */
+    void next() throws HyracksDataException;
 
-    public void close() throws HyracksDataException;
+    /**
+     * Closes the cursor
+     *
+     * @throws HyracksDataException
+     */
+    void close() throws HyracksDataException;
 
-    public void reset() throws HyracksDataException, IndexException;
+    /**
+     * Resets the cursor so it can be reused
+     *
+     * @throws HyracksDataException
+     */
+    void reset() throws HyracksDataException;
 
-    public ITupleReference getTuple();
+    /**
+     * @return the tuple pointed to by the cursor
+     */
+    ITupleReference getTuple();
 }
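
The documented lifecycle translates into the usual cursor idiom; an illustrative sketch (initialState, searchPred, and the tuple consumer are assumed to come from the caller):

    cursor.open(initialState, searchPred); // once open succeeds, close must be called
    try {
        while (cursor.hasNext()) {
            cursor.next();
            ITupleReference tuple = cursor.getTuple(); // tuple currently pointed to
            // ... consume tuple ...
        }
    } finally {
        cursor.close();
    }
    cursor.reset(); // optionally prepare the cursor for another search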
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexFrame.java
index 8e7834f..c841f84 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexFrame.java
@@ -89,7 +89,7 @@
 
     public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey,
             IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache)
-            throws HyracksDataException, TreeIndexException;
+            throws HyracksDataException;
 
     public ISlotManager getSlotManager();
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITwoPCIndexBulkLoader.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITwoPCIndexBulkLoader.java
index 5a7a4a7..16c0afa 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITwoPCIndexBulkLoader.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITwoPCIndexBulkLoader.java
@@ -34,7 +34,7 @@
      * @throws HyracksDataException
      *             If the BufferCache throws while un/pinning or un/latching.
      */
-    public void delete(ITupleReference tuple) throws IndexException, HyracksDataException;
+    public void delete(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Abort the bulk modify op
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/TreeIndexException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/TreeIndexException.java
deleted file mode 100644
index 080752f..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/TreeIndexException.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.common.api;
-
-public class TreeIndexException extends IndexException {
-
-    private static final long serialVersionUID = 1L;
-    private boolean handled = false;
-
-    public TreeIndexException(Exception e) {
-        super(e);
-    }
-
-    public TreeIndexException(String message) {
-        super(message);
-    }
-
-    public void setHandled(boolean handled) {
-        this.handled = handled;
-    }
-
-    public boolean getHandled() {
-        return handled;
-    }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/UnsortedInputException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/UnsortedInputException.java
deleted file mode 100644
index 52a8488..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/UnsortedInputException.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.common.api;
-
-/**
- * Thrown when trying to bulk load an index with an unsorted input stream.
- */
-public class UnsortedInputException extends IndexException {
-    private static final long serialVersionUID = 1L;
-
-    public UnsortedInputException(Exception e) {
-        super(e);
-    }
-
-    public UnsortedInputException(String message) {
-        super(message);
-    }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexBulkLoadOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexBulkLoadOperatorNodePushable.java
index 6439279..12da35f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexBulkLoadOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexBulkLoadOperatorNodePushable.java
@@ -30,7 +30,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader;
 import org.apache.hyracks.storage.am.common.api.IIndexDataflowHelper;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
 
 public class IndexBulkLoadOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
@@ -83,12 +82,7 @@
 
         for (int i = 0; i < tupleCount; i++) {
             tuple.reset(accessor, i);
-
-            try {
-                bulkLoader.add(tuple);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            bulkLoader.add(tuple);
         }
 
         FrameUtils.flushFrame(buffer, writer);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexInsertUpdateDeleteOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexInsertUpdateDeleteOperatorNodePushable.java
index 4f01978..0fdbf34 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexInsertUpdateDeleteOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexInsertUpdateDeleteOperatorNodePushable.java
@@ -25,6 +25,7 @@
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
@@ -36,8 +37,6 @@
 import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 import org.apache.hyracks.storage.am.common.api.ITupleFilter;
 import org.apache.hyracks.storage.am.common.api.ITupleFilterFactory;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
@@ -79,8 +78,8 @@
         try {
             writer.open();
             LocalResource resource = indexHelper.getResource();
-            modCallback = opDesc.getModificationOpCallbackFactory().createModificationOperationCallback(resource, ctx,
-                    this);
+            modCallback =
+                    opDesc.getModificationOpCallbackFactory().createModificationOperationCallback(resource, ctx, this);
             indexAccessor = index.createAccessor(modCallback, NoOpOperationCallback.INSTANCE);
             ITupleFilterFactory tupleFilterFactory = opDesc.getTupleFilterFactory();
             if (tupleFilterFactory != null) {
@@ -110,8 +109,11 @@
                     case INSERT: {
                         try {
                             indexAccessor.insert(tuple);
-                        } catch (TreeIndexDuplicateKeyException e) {
-                            // ingnore that exception to allow inserting existing keys which becomes an NoOp
+                        } catch (HyracksDataException e) {
+                            // ignore the exception to allow inserting existing keys, which becomes a no-op
+                            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                                throw e;
+                            }
                         }
                         break;
                     }
@@ -126,8 +128,11 @@
                     case DELETE: {
                         try {
                             indexAccessor.delete(tuple);
-                        } catch (TreeIndexNonExistentKeyException e) {
+                        } catch (HyracksDataException e) {
                             // ignore the exception to allow deletions of non-existing keys
+                            if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                                throw e;
+                            }
                         }
                         break;
                     }
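
The rewritten INSERT and DELETE cases replace exception-type dispatch with error-code dispatch; the general shape of the pattern:

    try {
        indexAccessor.insert(tuple);
    } catch (HyracksDataException e) {
        if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
            throw e; // only the expected error code is swallowed; anything else propagates
        }
        // duplicate key: the insert degrades to a no-op
    }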
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/exceptions/TreeIndexDuplicateKeyException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/exceptions/TreeIndexDuplicateKeyException.java
deleted file mode 100644
index 3178740..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/exceptions/TreeIndexDuplicateKeyException.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.common.exceptions;
-
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-
-public class TreeIndexDuplicateKeyException extends TreeIndexException {
-    private static final long serialVersionUID = 1L;
-
-    public TreeIndexDuplicateKeyException(Exception e) {
-        super(e);
-    }
-
-    public TreeIndexDuplicateKeyException(String message) {
-        super(message);
-    }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/exceptions/TreeIndexNonExistentKeyException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/exceptions/TreeIndexNonExistentKeyException.java
deleted file mode 100644
index bd5e5c4..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/exceptions/TreeIndexNonExistentKeyException.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.common.exceptions;
-
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-
-public class TreeIndexNonExistentKeyException extends TreeIndexException {
-
-    private static final long serialVersionUID = 1L;
-
-    public TreeIndexNonExistentKeyException(Exception e) {
-        super(e);
-    }
-
-    public TreeIndexNonExistentKeyException(String message) {
-        super(message);
-    }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/AbstractTreeIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/AbstractTreeIndex.java
index 0af7939..2c8429d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/AbstractTreeIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/AbstractTreeIndex.java
@@ -34,8 +34,6 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrame;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
 import org.apache.hyracks.storage.common.buffercache.ICachedPage;
@@ -70,10 +68,9 @@
 
     protected int bulkloadLeafStart = 0;
 
-    public AbstractTreeIndex(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
-            IPageManager freePageManager, ITreeIndexFrameFactory interiorFrameFactory,
-            ITreeIndexFrameFactory leafFrameFactory, IBinaryComparatorFactory[] cmpFactories, int fieldCount,
-            FileReference file) {
+    public AbstractTreeIndex(IBufferCache bufferCache, IFileMapProvider fileMapProvider, IPageManager freePageManager,
+            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
+            IBinaryComparatorFactory[] cmpFactories, int fieldCount, FileReference file) {
         this.bufferCache = bufferCache;
         this.fileMapProvider = fileMapProvider;
         this.freePageManager = freePageManager;
@@ -272,8 +269,7 @@
         protected final IFIFOPageQueue queue;
         protected List<ICachedPage> pagesToWrite;
 
-        public AbstractTreeIndexBulkLoader(float fillFactor)
-                throws TreeIndexException, HyracksDataException {
+        public AbstractTreeIndexBulkLoader(float fillFactor) throws HyracksDataException {
             leafFrame = leafFrameFactory.createFrame();
             interiorFrame = interiorFrameFactory.createFrame();
             metaFrame = freePageManager.createMetadataFrame();
@@ -281,7 +277,7 @@
             queue = bufferCache.createFIFOQueue();
 
             if (!isEmptyTree(leafFrame)) {
-                throw new TreeIndexException("Cannot bulk-load a non-empty tree.");
+                throw new HyracksDataException("Cannot bulk-load a non-empty tree.");
             }
 
             this.cmp = MultiComparator.create(cmpFactories);
@@ -293,8 +289,8 @@
 
             NodeFrontier leafFrontier = new NodeFrontier(leafFrame.createTupleReference());
             leafFrontier.pageId = freePageManager.takePage(metaFrame);
-            leafFrontier.page = bufferCache
-                    .confiscatePage(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId));
+            leafFrontier.page =
+                    bufferCache.confiscatePage(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId));
 
             interiorFrame.setPage(leafFrontier.page);
             interiorFrame.initBuffer((byte) 0);
@@ -310,7 +306,7 @@
         }
 
         @Override
-        public abstract void add(ITupleReference tuple) throws IndexException, HyracksDataException;
+        public abstract void add(ITupleReference tuple) throws HyracksDataException;
 
         protected void handleException() throws HyracksDataException {
             // Unlatch and unpin pages that weren't in the queue to avoid leaking memory.
@@ -356,17 +352,13 @@
         ITreeIndexAccessor accessor;
 
         public TreeIndexInsertBulkLoader() throws HyracksDataException {
-            accessor = (ITreeIndexAccessor) createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
+            accessor =
+                    (ITreeIndexAccessor) createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         }
 
         @Override
         public void add(ITupleReference tuple) throws HyracksDataException {
-            try {
-                accessor.insert(tuple);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            accessor.insert(tuple);
         }
 
         @Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTree.java
index cc4bfb9..90faa91 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTree.java
@@ -23,6 +23,7 @@
 import java.util.List;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
@@ -44,13 +45,11 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
 import org.apache.hyracks.storage.am.common.api.ITwoPCIndexBulkLoader;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.lsm.btree.tuples.LSMBTreeRefrencingTupleWriterFactory;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -100,8 +99,8 @@
             ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback,
             TreeIndexFactory<BTree> transactionBTreeFactory, int version, boolean durable) {
         super(ioManager, insertLeafFrameFactory, deleteLeafFrameFactory, fileManager, diskBTreeFactory,
-                bulkLoadBTreeFactory, bloomFilterFactory, bloomFilterFalsePositiveRate, diskFileMapProvider,
-                fieldCount, cmpFactories, mergePolicy, opTracker, ioScheduler, ioOpCallback, false, durable);
+                bulkLoadBTreeFactory, bloomFilterFactory, bloomFilterFalsePositiveRate, diskFileMapProvider, fieldCount,
+                cmpFactories, mergePolicy, opTracker, ioScheduler, ioOpCallback, false, durable);
         this.transactionComponentFactory =
                 new LSMBTreeDiskComponentFactory(transactionBTreeFactory, bloomFilterFactory, null);
         this.secondDiskComponents = new LinkedList<>();
@@ -152,7 +151,7 @@
     // The only reason to override the following method is that it uses a different context object
     @Override
     public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         ExternalBTreeOpContext ctx = (ExternalBTreeOpContext) ictx;
         List<ILSMComponent> operationalComponents = ctx.getComponentHolder();
         ctx.searchInitialState.reset(pred, operationalComponents);
@@ -169,7 +168,7 @@
     // in addition, determining whether or not to keep deleted tuples is different here
     @Override
     public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         ExternalBTreeOpContext opCtx = createOpContext(NoOpOperationCallback.INSTANCE, -1);
         opCtx.setOperation(IndexOperation.MERGE);
         List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
@@ -249,20 +248,12 @@
         if (diskComponents.size() == 0 && secondDiskComponents.size() == 0) {
             //First time activation
             List<LSMComponentFileReferences> validFileReferences;
-            try {
-                validFileReferences = fileManager.cleanupAndGetValidFiles();
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            validFileReferences = fileManager.cleanupAndGetValidFiles();
             for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
                 LSMBTreeDiskComponent component;
-                try {
-                    component = createDiskComponent(componentFactory,
-                            lsmComonentFileReference.getInsertIndexFileReference(),
-                            lsmComonentFileReference.getBloomFilterFileReference(), false);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
+                component =
+                        createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
+                                lsmComonentFileReference.getBloomFilterFileReference(), false);
                 diskComponents.add(component);
                 secondDiskComponents.add(component);
             }
@@ -395,8 +386,7 @@
 
     // Not supported
     @Override
-    public void modify(IIndexOperationContext ictx, ITupleReference tuple)
-            throws HyracksDataException, IndexException {
+    public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
         throw new UnsupportedOperationException("tuple modify not supported in LSM-Disk-Only-BTree");
     }
 
@@ -409,7 +399,7 @@
 
     // Not supported
     @Override
-    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
         throw new UnsupportedOperationException("flush not supported in LSM-Disk-Only-BTree");
     }
 
@@ -451,23 +441,15 @@
     // For initial load
     @Override
     public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws TreeIndexException {
-        try {
-            return new LSMTwoPCBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, false);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+            boolean checkIfEmptyIndex) throws HyracksDataException {
+        return new LSMTwoPCBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, false);
     }
 
     // For transaction bulk load <- could consolidate with the above method ->
     @Override
     public IIndexBulkLoader createTransactionBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws TreeIndexException {
-        try {
-            return new LSMTwoPCBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, true);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+            boolean checkIfEmptyIndex) throws HyracksDataException {
+        return new LSMTwoPCBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, true);
     }
 
     // The bulk loader used for both initial loading and transaction
@@ -483,24 +465,16 @@
         private final ITreeIndexTupleWriterFactory frameTupleWriterFactory;
 
         public LSMTwoPCBTreeBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-                boolean checkIfEmptyIndex, boolean isTransaction) throws TreeIndexException, HyracksDataException {
+                boolean checkIfEmptyIndex, boolean isTransaction) throws HyracksDataException {
             this.isTransaction = isTransaction;
             // Create the appropriate target
             if (isTransaction) {
-                try {
-                    component = createTransactionTarget();
-                } catch (HyracksDataException | IndexException e) {
-                    throw new TreeIndexException(e);
-                }
+                component = createTransactionTarget();
             } else {
                 if (checkIfEmptyIndex && !isEmptyIndex()) {
-                    throw new TreeIndexException("Cannot load an index that is not empty");
+                    throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
                 }
-                try {
-                    component = createBulkLoadTarget();
-                } catch (HyracksDataException | IndexException e) {
-                    throw new TreeIndexException(e);
-                }
+                component = createBulkLoadTarget();
             }
 
             frameTupleWriterFactory =
@@ -518,11 +492,11 @@
         // It is expected that the mode was set to insert operation before
         // calling add
         @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+        public void add(ITupleReference tuple) throws HyracksDataException {
             try {
                 bulkLoader.add(tuple);
                 builder.add(tuple);
-            } catch (IndexException | HyracksDataException | RuntimeException e) {
+            } catch (Exception e) {
                 cleanupArtifacts();
                 throw e;
             }
@@ -533,7 +507,7 @@
 
         // This is made public because, in case of a failure, it is better to delete
         // all created artifacts.
-        public void cleanupArtifacts() throws HyracksDataException, IndexException {
+        public void cleanupArtifacts() throws HyracksDataException {
             if (!cleanedUpArtifacts) {
                 cleanedUpArtifacts = true;
                 // We make sure to end the bloom filter load to release latches.
@@ -557,7 +531,7 @@
         }
 
         @Override
-        public void end() throws HyracksDataException, IndexException {
+        public void end() throws HyracksDataException {
             if (!cleanedUpArtifacts) {
                 if (!endedBloomFilterLoad) {
                     builder.end();
@@ -583,12 +557,12 @@
         // It is expected that the mode was set to delete operation before
         // calling delete
         @Override
-        public void delete(ITupleReference tuple) throws IndexException, HyracksDataException {
+        public void delete(ITupleReference tuple) throws HyracksDataException {
             ((LSMBTreeRefrencingTupleWriterFactory) frameTupleWriterFactory).setMode(IndexOperation.DELETE);
             try {
                 bulkLoader.add(tuple);
                 builder.add(tuple);
-            } catch (IndexException | HyracksDataException | RuntimeException e) {
+            } catch (Exception e) {
                 cleanupArtifacts();
                 throw e;
             }
@@ -609,12 +583,12 @@
 
         // This method is used to create a target for a bulk modify operation. This
         // component must then be either committed or deleted
-        private ILSMDiskComponent createTransactionTarget() throws HyracksDataException, IndexException {
+        private ILSMDiskComponent createTransactionTarget() throws HyracksDataException {
             LSMComponentFileReferences componentFileRefs;
             try {
                 componentFileRefs = fileManager.getNewTransactionFileReference();
             } catch (IOException e) {
-                throw new HyracksDataException("Failed to create transaction components", e);
+                throw HyracksDataException.create(e);
             }
             return createDiskComponent(transactionComponentFactory, componentFileRefs.getInsertIndexFileReference(),
                     componentFileRefs.getBloomFilterFileReference(), true);
@@ -680,7 +654,7 @@
     }
 
     @Override
-    public void commitTransaction() throws TreeIndexException, HyracksDataException, IndexException {
+    public void commitTransaction() throws HyracksDataException {
         LSMComponentFileReferences componentFileRefrences = fileManager.getTransactionFileReferenceForCommit();
         LSMBTreeDiskComponent component = null;
         if (componentFileRefrences != null) {
@@ -691,21 +665,13 @@
     }
 
     @Override
-    public void abortTransaction() throws TreeIndexException {
-        try {
-            fileManager.deleteTransactionFiles();
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+    public void abortTransaction() throws HyracksDataException {
+        fileManager.deleteTransactionFiles();
     }
 
     @Override
-    public void recoverTransaction() throws TreeIndexException {
-        try {
-            fileManager.recoverTransaction();
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+    public void recoverTransaction() throws HyracksDataException {
+        fileManager.recoverTransaction();
     }
 
     @Override
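
Throughout this file's two-phase-commit bulk loader, failures now funnel through a single cleanup path; the recurring shape, mirroring add() and delete() above:

    try {
        bulkLoader.add(tuple);
        builder.add(tuple);
    } catch (Exception e) { // broadened from IndexException | HyracksDataException | RuntimeException
        cleanupArtifacts(); // drop any partially written component files
        throw e; // precise rethrow: the compiler still sees only HyracksDataException
    }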
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddy.java
index dc28db4..3be596c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddy.java
@@ -25,6 +25,7 @@
 import java.util.Set;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
@@ -47,14 +48,12 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import org.apache.hyracks.storage.am.common.api.ITwoPCIndexBulkLoader;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentFactory;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -141,21 +140,13 @@
         if (diskComponents.size() == 0 && secondDiskComponents.size() == 0) {
             //First time activation
             List<LSMComponentFileReferences> validFileReferences;
-            try {
-                validFileReferences = fileManager.cleanupAndGetValidFiles();
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            validFileReferences = fileManager.cleanupAndGetValidFiles();
             for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
                 LSMBTreeWithBuddyDiskComponent component;
-                try {
-                    component = createDiskComponent(componentFactory,
-                            lsmComonentFileReference.getInsertIndexFileReference(),
-                            lsmComonentFileReference.getDeleteIndexFileReference(),
-                            lsmComonentFileReference.getBloomFilterFileReference(), false);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
+                component =
+                        createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
+                                lsmComonentFileReference.getDeleteIndexFileReference(),
+                                lsmComonentFileReference.getBloomFilterFileReference(), false);
                 diskComponents.add(component);
                 secondDiskComponents.add(component);
             }
@@ -299,35 +290,25 @@
     // For initial load
     @Override
     public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws TreeIndexException {
-        try {
-            return new LSMTwoPCBTreeWithBuddyBulkLoader(fillLevel, verifyInput, 0, checkIfEmptyIndex, false);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+            boolean checkIfEmptyIndex) throws HyracksDataException {
+        return new LSMTwoPCBTreeWithBuddyBulkLoader(fillLevel, verifyInput, 0, checkIfEmptyIndex, false);
     }
 
     // For transaction bulk load <- could consolidate with the above method ->
     @Override
     public IIndexBulkLoader createTransactionBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws TreeIndexException {
-        try {
-            return new LSMTwoPCBTreeWithBuddyBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex,
-                    true);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+            boolean checkIfEmptyIndex) throws HyracksDataException {
+        return new LSMTwoPCBTreeWithBuddyBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, true);
     }
 
     @Override
-    public void modify(IIndexOperationContext ictx, ITupleReference tuple)
-            throws HyracksDataException, IndexException {
+    public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
         throw new UnsupportedOperationException("tuple modify not supported in LSM-Disk-Only-BTree");
     }
 
     @Override
     public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         ExternalBTreeWithBuddyOpContext ctx = (ExternalBTreeWithBuddyOpContext) ictx;
         List<ILSMComponent> operationalComponents = ictx.getComponentHolder();
         ctx.searchInitialState.setOperationalComponents(operationalComponents);
@@ -337,12 +318,12 @@
     @Override
     public void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
             throws HyracksDataException {
-        throw new UnsupportedOperationException("flush not supported in LSM-Disk-Only-BTree");
+        throw HyracksDataException.create(ErrorCode.FLUSH_NOT_SUPPORTED_IN_EXTERNAL_INDEX);
     }
 
     @Override
-    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
-        throw new UnsupportedOperationException("flush not supported in LSM-Disk-Only-BTree");
+    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
+        throw HyracksDataException.create(ErrorCode.FLUSH_NOT_SUPPORTED_IN_EXTERNAL_INDEX);
     }
 
     protected LSMComponentFileReferences getMergeTargetFileName(List<ILSMComponent> mergingDiskComponents)
@@ -359,7 +340,7 @@
 
     @Override
     public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         ILSMIndexOperationContext bctx = createOpContext(NoOpOperationCallback.INSTANCE, 0);
         bctx.setOperation(IndexOperation.MERGE);
         List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
@@ -385,15 +366,14 @@
     }
 
     // This method creates the appropriate opContext for the targeted version
-    public ExternalBTreeWithBuddyOpContext createOpContext(ISearchOperationCallback searchCallback,
-            int targetVersion) {
+    public ExternalBTreeWithBuddyOpContext createOpContext(ISearchOperationCallback searchCallback, int targetVersion) {
         return new ExternalBTreeWithBuddyOpContext(btreeCmpFactories, buddyBtreeCmpFactories, searchCallback,
                 targetVersion, lsmHarness, btreeInteriorFrameFactory, btreeLeafFrameFactory,
                 buddyBtreeLeafFrameFactory);
     }
 
     @Override
-    public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
         LSMBTreeWithBuddyMergeOperation mergeOp = (LSMBTreeWithBuddyMergeOperation) operation;
         ITreeIndexCursor cursor = mergeOp.getCursor();
         ISearchPredicate btreeSearchPred = new RangePredicate(null, null, true, true, null, null);
@@ -402,8 +382,8 @@
         search(opCtx, cursor, btreeSearchPred);
 
         LSMBTreeWithBuddyDiskComponent mergedComponent =
-                createDiskComponent(componentFactory, mergeOp.getBTreeMergeTarget(),
-                        mergeOp.getBuddyBTreeMergeTarget(), mergeOp.getBloomFilterMergeTarget(), true);
+                createDiskComponent(componentFactory, mergeOp.getBTreeMergeTarget(), mergeOp.getBuddyBTreeMergeTarget(),
+                        mergeOp.getBloomFilterMergeTarget(), true);
 
         // In case we must keep the deleted-keys BuddyBTrees, then they must be
         // merged *before* merging the b-trees so that
@@ -421,8 +401,8 @@
 
             long numElements = 0L;
             for (int i = 0; i < mergeOp.getMergingComponents().size(); ++i) {
-                numElements += ((LSMBTreeWithBuddyDiskComponent) mergeOp.getMergingComponents().get(i))
-                        .getBloomFilter().getNumElements();
+                numElements += ((LSMBTreeWithBuddyDiskComponent) mergeOp.getMergingComponents().get(i)).getBloomFilter()
+                        .getNumElements();
             }
 
             int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElements);
@@ -613,7 +593,7 @@
 
     private LSMBTreeWithBuddyDiskComponent createDiskComponent(ILSMDiskComponentFactory factory,
             FileReference insertFileRef, FileReference deleteFileRef, FileReference bloomFilterFileRef,
-            boolean createComponent) throws HyracksDataException, IndexException {
+            boolean createComponent) throws HyracksDataException {
         // Create new instance.
         LSMBTreeWithBuddyDiskComponent component = (LSMBTreeWithBuddyDiskComponent) factory
                 .createComponent(new LSMComponentFileReferences(insertFileRef, deleteFileRef, bloomFilterFileRef));
@@ -666,24 +646,16 @@
         private final boolean isTransaction;
 
         public LSMTwoPCBTreeWithBuddyBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-                boolean checkIfEmptyIndex, boolean isTransaction) throws TreeIndexException, HyracksDataException {
+                boolean checkIfEmptyIndex, boolean isTransaction) throws HyracksDataException {
             this.isTransaction = isTransaction;
             // Create the appropriate target
             if (isTransaction) {
-                try {
-                    component = createTransactionTarget();
-                } catch (HyracksDataException | IndexException e) {
-                    throw new TreeIndexException(e);
-                }
+                component = createTransactionTarget();
             } else {
                 if (checkIfEmptyIndex && !isEmptyIndex()) {
-                    throw new TreeIndexException("Cannot load an index that is not empty");
+                    throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
                 }
-                try {
-                    component = createBulkLoadTarget();
-                } catch (HyracksDataException | IndexException e) {
-                    throw new TreeIndexException(e);
-                }
+                component = createBulkLoadTarget();
             }
 
             // Create the three loaders
@@ -699,10 +671,10 @@
         }
 
         @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+        public void add(ITupleReference tuple) throws HyracksDataException {
             try {
                 btreeBulkLoader.add(tuple);
-            } catch (IndexException | HyracksDataException | RuntimeException e) {
+            } catch (Exception e) {
                 cleanupArtifacts();
                 throw e;
             }
@@ -738,7 +710,7 @@
         }
 
         @Override
-        public void end() throws HyracksDataException, IndexException {
+        public void end() throws HyracksDataException {
             if (!cleanedUpArtifacts) {
                 if (!endedBloomFilterLoad) {
                     builder.end();
@@ -765,11 +737,11 @@
         }
 
         @Override
-        public void delete(ITupleReference tuple) throws IndexException, HyracksDataException {
+        public void delete(ITupleReference tuple) throws HyracksDataException {
             try {
                 buddyBtreeBulkLoader.add(tuple);
                 builder.add(tuple);
-            } catch (IndexException | HyracksDataException | RuntimeException e) {
+            } catch (Exception e) {
                 cleanupArtifacts();
                 throw e;
             }
@@ -788,12 +760,12 @@
 
         // This method is used to create a target for a bulk modify operation. This
         // component must then eventually be either committed or deleted
-        private ILSMDiskComponent createTransactionTarget() throws HyracksDataException, IndexException {
+        private ILSMDiskComponent createTransactionTarget() throws HyracksDataException {
             LSMComponentFileReferences componentFileRefs;
             try {
                 componentFileRefs = fileManager.getNewTransactionFileReference();
             } catch (IOException e) {
-                throw new HyracksDataException("Failed to create transaction components", e);
+                throw HyracksDataException.create(e);
             }
             return createDiskComponent(bulkComponentFactory, componentFileRefs.getInsertIndexFileReference(),
                     componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(),
@@ -801,11 +773,10 @@
         }
     }
 
-    protected ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
+    protected ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException {
         LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
         return createDiskComponent(bulkComponentFactory, componentFileRefs.getInsertIndexFileReference(),
-                componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(),
-                true);
+                componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(), true);
     }
 
     @Override
@@ -842,7 +813,7 @@
     }
 
     @Override
-    public void commitTransaction() throws HyracksDataException, IndexException {
+    public void commitTransaction() throws HyracksDataException {
         LSMComponentFileReferences componentFileRefrences = fileManager.getTransactionFileReferenceForCommit();
         LSMBTreeWithBuddyDiskComponent component = null;
         if (componentFileRefrences != null) {
@@ -854,21 +825,13 @@
     }
 
     @Override
-    public void abortTransaction() throws TreeIndexException {
-        try {
-            fileManager.deleteTransactionFiles();
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+    public void abortTransaction() throws HyracksDataException {
+        fileManager.deleteTransactionFiles();
     }
 
     @Override
-    public void recoverTransaction() throws TreeIndexException {
-        try {
-            fileManager.recoverTransaction();
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+    public void recoverTransaction() throws HyracksDataException {
+        fileManager.recoverTransaction();
     }
 
     @Override
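
As in ExternalBTree, wrapping of lower-level exceptions now goes through the HyracksDataException.create factory; a reduced illustration of the pattern used in createTransactionTarget():

    LSMComponentFileReferences componentFileRefs;
    try {
        componentFileRefs = fileManager.getNewTransactionFileReference();
    } catch (IOException e) {
        throw HyracksDataException.create(e); // wraps the cause; no ad-hoc message string
    }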
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
index 3ecb5e0..04c895f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
@@ -25,6 +25,7 @@
 import java.util.Set;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
@@ -48,9 +49,6 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
 import org.apache.hyracks.storage.am.common.impls.AbstractSearchPredicate;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
@@ -177,20 +175,11 @@
         List<ILSMDiskComponent> immutableComponents = diskComponents;
         immutableComponents.clear();
         List<LSMComponentFileReferences> validFileReferences;
-        try {
-            validFileReferences = fileManager.cleanupAndGetValidFiles();
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
+        validFileReferences = fileManager.cleanupAndGetValidFiles();
         for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
             LSMBTreeDiskComponent component;
-            try {
-                component =
-                        createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
-                                lsmComonentFileReference.getBloomFilterFileReference(), false);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            component = createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
+                    lsmComonentFileReference.getBloomFilterFileReference(), false);
             immutableComponents.add(component);
         }
         isActivated = true;
@@ -204,8 +193,7 @@
 
         if (flushOnExit) {
             BlockingIOOperationCallbackWrapper cb = new BlockingIOOperationCallbackWrapper(ioOpCallback);
-            ILSMIndexAccessor accessor =
-                    createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+            ILSMIndexAccessor accessor = createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
             accessor.scheduleFlush(cb);
             try {
                 cb.waitForIO();
@@ -326,8 +314,7 @@
     }
 
     @Override
-    public void modify(IIndexOperationContext ictx, ITupleReference tuple)
-            throws HyracksDataException, IndexException {
+    public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
         LSMBTreeOpContext ctx = (LSMBTreeOpContext) ictx;
 
         ITupleReference indexTuple;
@@ -356,7 +343,7 @@
         }
     }
 
-    private boolean insert(ITupleReference tuple, LSMBTreeOpContext ctx) throws HyracksDataException, IndexException {
+    private boolean insert(ITupleReference tuple, LSMBTreeOpContext ctx) throws HyracksDataException {
         LSMBTreePointSearchCursor searchCursor = ctx.insertSearchCursor;
         IIndexCursor memCursor = ctx.memCursor;
         RangePredicate predicate = (RangePredicate) ctx.getSearchPredicate();
@@ -370,7 +357,7 @@
                     memCursor.next();
                     LSMBTreeTupleReference lsmbtreeTuple = (LSMBTreeTupleReference) memCursor.getTuple();
                     if (!lsmbtreeTuple.isAntimatter()) {
-                        throw new TreeIndexDuplicateKeyException("Failed to insert key since key already exists.");
+                        throw HyracksDataException.create(ErrorCode.DUPLICATE_KEY);
                     } else {
                         memCursor.close();
                         ctx.currentMutableBTreeAccessor.upsertIfConditionElseInsert(tuple,
@@ -394,7 +381,7 @@
             search(ctx, searchCursor, predicate);
             try {
                 if (searchCursor.hasNext()) {
-                    throw new TreeIndexDuplicateKeyException("Failed to insert key since key already exists.");
+                    throw HyracksDataException.create(ErrorCode.DUPLICATE_KEY);
                 }
             } finally {
                 searchCursor.close();
@@ -408,7 +395,7 @@
 
     @Override
     public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         LSMBTreeOpContext ctx = (LSMBTreeOpContext) ictx;
         List<ILSMComponent> operationalComponents = ctx.getComponentHolder();
         ctx.searchInitialState.reset(pred, operationalComponents);
@@ -431,7 +418,7 @@
     }
 
     @Override
-    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
         LSMBTreeFlushOperation flushOp = (LSMBTreeFlushOperation) operation;
         LSMBTreeMemoryComponent flushingComponent = (LSMBTreeMemoryComponent) flushOp.getFlushingComponent();
         IIndexAccessor accessor = flushingComponent.getBTree().createAccessor(NoOpOperationCallback.INSTANCE,
@@ -506,7 +493,7 @@
 
     @Override
     public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         LSMBTreeOpContext opCtx = createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         opCtx.setOperation(IndexOperation.MERGE);
         List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
@@ -529,7 +516,7 @@
     }
 
     @Override
-    public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
         LSMBTreeMergeOperation mergeOp = (LSMBTreeMergeOperation) operation;
         ITreeIndexCursor cursor = mergeOp.getCursor();
         RangePredicate rangePred = new RangePredicate(null, null, true, true, null, null);
@@ -587,7 +574,7 @@
 
     protected LSMBTreeDiskComponent createDiskComponent(LSMBTreeDiskComponentFactory factory,
             FileReference btreeFileRef, FileReference bloomFilterFileRef, boolean createComponent)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         // Create new BTree instance.
         LSMBTreeDiskComponent component =
                 factory.createComponent(new LSMComponentFileReferences(btreeFileRef, null, bloomFilterFileRef));
@@ -610,15 +597,11 @@
 
     @Override
     public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws TreeIndexException {
-        try {
-            return new LSMBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+            boolean checkIfEmptyIndex) throws HyracksDataException {
+        return new LSMBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex);
     }
 
-    protected ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
+    protected ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException {
         LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
         return createDiskComponent(bulkLoadComponentFactory, componentFileRefs.getInsertIndexFileReference(),
                 componentFileRefs.getBloomFilterFileReference(), true);
@@ -647,15 +630,11 @@
         public final MultiComparator filterCmp;
 
         public LSMBTreeBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-                boolean checkIfEmptyIndex) throws TreeIndexException, HyracksDataException {
+                boolean checkIfEmptyIndex) throws HyracksDataException {
             if (checkIfEmptyIndex && !isEmptyIndex()) {
-                throw new TreeIndexException("Cannot load an index that is not empty");
+                throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
             }
-            try {
-                component = createBulkLoadTarget();
-            } catch (HyracksDataException | IndexException e) {
-                throw new TreeIndexException(e);
-            }
+            component = createBulkLoadTarget();
             bulkLoader = (BTreeBulkLoader) ((LSMBTreeDiskComponent) component).getBTree().createBulkLoader(fillFactor,
                     verifyInput, numElementsHint, false);
 
@@ -681,7 +660,7 @@
         }
 
         @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+        public void add(ITupleReference tuple) throws HyracksDataException {
             try {
                 ITupleReference t;
                 if (indexTuple != null) {
@@ -700,7 +679,7 @@
                     filterTuple.reset(tuple);
                     component.getLSMComponentFilter().update(filterTuple, filterCmp);
                 }
-            } catch (IndexException | HyracksDataException | RuntimeException e) {
+            } catch (Exception e) {
                 cleanupArtifacts();
                 throw e;
             }
@@ -726,7 +705,7 @@
         }
 
         @Override
-        public void end() throws HyracksDataException, IndexException {
+        public void end() throws HyracksDataException {
             if (!cleanedUpArtifacts) {
                 if (hasBloomFilter && !endedBloomFilterLoad) {
                     builder.end();
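
A note on the duplicate-key change above: the typed TreeIndexDuplicateKeyException is gone, and duplicates now surface as a HyracksDataException created with ErrorCode.DUPLICATE_KEY. A minimal caller-side sketch, assuming HyracksDataException exposes the code via getErrorCode() (consistent with the create(ErrorCode) factory used above); handleDuplicateKey is a hypothetical callback, not code from this change:

    // Sketch: duplicate keys are now detected by error code, not by exception type.
    try {
        lsmAccessor.insert(tuple);
    } catch (HyracksDataException e) {
        if (e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
            handleDuplicateKey(tuple); // previously: catch (TreeIndexDuplicateKeyException)
        } else {
            throw e;
        }
    }
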
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
index 4a12b5a..194eb3a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
@@ -34,7 +34,6 @@
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
 import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
 import org.apache.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
@@ -82,7 +81,7 @@
     };
 
     @Override
-    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
+    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException {
         List<LSMComponentFileReferences> validFiles = new ArrayList<>();
         ArrayList<ComparableFileName> allBTreeFiles = new ArrayList<>();
         ArrayList<ComparableFileName> allBloomFilterFiles = new ArrayList<>();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFlushOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFlushOperation.java
index a2ed40b..c57c35f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFlushOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFlushOperation.java
@@ -25,7 +25,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -67,7 +66,7 @@
     }
 
     @Override
-    public Boolean call() throws HyracksDataException, IndexException {
+    public Boolean call() throws HyracksDataException {
         accessor.flush(this);
         return true;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeMergeOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeMergeOperation.java
index 505a5f1..fbf6c5c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeMergeOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeMergeOperation.java
@@ -27,7 +27,6 @@
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -79,7 +78,7 @@
     }
 
     @Override
-    public Boolean call() throws HyracksDataException, IndexException {
+    public Boolean call() throws HyracksDataException {
         accessor.merge(this);
         return true;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreePointSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreePointSearchCursor.java
index 2fb96c3..77c3573 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreePointSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreePointSearchCursor.java
@@ -32,7 +32,6 @@
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.LSMComponentType;
@@ -63,7 +62,7 @@
     }
 
     @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
+    public boolean hasNext() throws HyracksDataException {
         if (nextHasBeenCalled) {
             return false;
         } else if (foundTuple) {
@@ -125,7 +124,7 @@
     }
 
     @Override
-    public void reset() throws HyracksDataException, IndexException {
+    public void reset() throws HyracksDataException {
         try {
             if (rangeCursors != null) {
                 for (int i = 0; i < rangeCursors.length; ++i) {
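
For orientation, the cursor signatures being narrowed here implement the usual open/hasNext/next protocol; after this change callers deal only with HyracksDataException. A sketch of the pattern, mirroring the search path in LSMBTree.insert above (process is a hypothetical consumer):

    // Sketch of typical cursor usage against an LSM index accessor.
    accessor.search(cursor, predicate);
    try {
        while (cursor.hasNext()) { // throws only HyracksDataException now
            cursor.next();
            ITupleReference t = cursor.getTuple();
            process(t); // illustrative placeholder
        }
    } finally {
        cursor.close();
    }
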
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeRangeSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeRangeSearchCursor.java
index d6c12e2..1b8c151 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeRangeSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeRangeSearchCursor.java
@@ -34,7 +34,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.LSMComponentType;
@@ -62,7 +61,7 @@
     }
 
     @Override
-    public void reset() throws HyracksDataException, IndexException {
+    public void reset() throws HyracksDataException {
         super.reset();
         proceed = true;
     }
@@ -75,7 +74,7 @@
     }
 
     @Override
-    protected void checkPriorityQueue() throws HyracksDataException, IndexException {
+    protected void checkPriorityQueue() throws HyracksDataException {
         while (!outputPriorityQueue.isEmpty() || needPush == true) {
             if (!outputPriorityQueue.isEmpty()) {
                 PriorityQueueElement checkElement = outputPriorityQueue.peek();
@@ -178,8 +177,7 @@
     }
 
     @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred)
-            throws HyracksDataException, IndexException {
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
         LSMBTreeCursorInitialState lsmInitialState = (LSMBTreeCursorInitialState) initialState;
         cmp = lsmInitialState.getOriginalKeyComparator();
         operationalComponents = lsmInitialState.getOperationalComponents();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeSearchCursor.java
index 5d5501d..a368ee9 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeSearchCursor.java
@@ -25,7 +25,6 @@
 import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
 import org.apache.hyracks.storage.common.buffercache.ICachedPage;
@@ -47,42 +46,16 @@
     }
 
     @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws IndexException,
-            HyracksDataException {
-
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
         LSMBTreeCursorInitialState lsmInitialState = (LSMBTreeCursorInitialState) initialState;
-
-        LSMBTreeSearchType searchType = LSMBTreeSearchType.RANGE;
         RangePredicate btreePred = (RangePredicate) searchPred;
-        if (btreePred.getLowKey() != null && btreePred.getHighKey() != null) {
-            if (btreePred.isLowKeyInclusive() && btreePred.isHighKeyInclusive()) {
-                if (btreePred.getLowKeyComparator().getKeyFieldCount() == btreePred.getHighKeyComparator()
-                        .getKeyFieldCount()) {
-                    if (btreePred.getLowKeyComparator().getKeyFieldCount() == lsmInitialState
-                            .getOriginalKeyComparator().getKeyFieldCount()) {
-                        if (lsmInitialState.getOriginalKeyComparator().compare(btreePred.getLowKey(),
-                                btreePred.getHighKey()) == 0) {
-                            searchType = LSMBTreeSearchType.POINT;
-                        }
-                    }
-                }
-            }
-        }
-        switch (searchType) {
-            case POINT:
-                currentCursor = pointCursor;
-                break;
-            case RANGE:
-                currentCursor = rangeCursor;
-                break;
-            default:
-                throw new HyracksDataException("Wrong search type");
-        }
+        currentCursor =
+                btreePred.isPointPredicate(lsmInitialState.getOriginalKeyComparator()) ? pointCursor : rangeCursor;
         currentCursor.open(lsmInitialState, searchPred);
     }
 
     @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
+    public boolean hasNext() throws HyracksDataException {
         return currentCursor.hasNext();
     }
 
@@ -100,7 +73,7 @@
     }
 
     @Override
-    public void reset() throws HyracksDataException, IndexException {
+    public void reset() throws HyracksDataException {
         if (currentCursor != null) {
             currentCursor.reset();
         }
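
The open() rewrite above folds the deleted nested checks into RangePredicate.isPointPredicate(...). Reconstructed from the removed branch, a predicate is a point lookup when both keys are set, both bounds are inclusive, the low-key, high-key, and original comparators all cover the same number of key fields, and the two keys compare equal. A sketch of that helper, under the assumption that it is exactly the removed logic (the real implementation lives in RangePredicate):

    // Sketch: a point predicate is the closed interval [k, k] over the full key.
    public boolean isPointPredicate(MultiComparator originalKeyComparator) throws HyracksDataException {
        return getLowKey() != null && getHighKey() != null
                && isLowKeyInclusive() && isHighKeyInclusive()
                && getLowKeyComparator().getKeyFieldCount() == getHighKeyComparator().getKeyFieldCount()
                && getLowKeyComparator().getKeyFieldCount() == originalKeyComparator.getKeyFieldCount()
                && originalKeyComparator.compare(getLowKey(), getHighKey()) == 0;
    }
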
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyAbstractCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyAbstractCursor.java
index dab48f7..0f03085 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyAbstractCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyAbstractCursor.java
@@ -30,7 +30,6 @@
 import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
@@ -72,8 +71,7 @@
     }
 
     @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred)
-            throws IndexException, HyracksDataException {
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
 
         LSMBTreeWithBuddyCursorInitialState lsmInitialState = (LSMBTreeWithBuddyCursorInitialState) initialState;
         btreeCmp = lsmInitialState.getBTreeCmp();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyFileManager.java
index 683c2e9..0173c06 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyFileManager.java
@@ -33,7 +33,6 @@
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
 import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
 import org.apache.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
@@ -74,8 +73,8 @@
         String baseName = baseDir + ts + SPLIT_STRING + ts;
         // Begin timestamp and end timestamp are identical since it is a flush
         return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING),
-                createFlushFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING), createFlushFile(baseName
-                        + SPLIT_STRING + BLOOM_FILTER_STRING));
+                createFlushFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING),
+                createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
@@ -88,12 +87,12 @@
         // Get the range of timestamps by taking the earliest and the latest
         // timestamps
         return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + BTREE_STRING),
-                createMergeFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING), createMergeFile(baseName
-                        + SPLIT_STRING + BLOOM_FILTER_STRING));
+                createMergeFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING),
+                createMergeFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
-    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
+    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException {
         List<LSMComponentFileReferences> validFiles = new ArrayList<>();
         ArrayList<ComparableFileName> allBTreeFiles = new ArrayList<>();
         ArrayList<ComparableFileName> allBuddyBTreeFiles = new ArrayList<>();
@@ -178,7 +177,8 @@
                 invalidBloomFilterFile.delete();
             } else {
                 // This scenario should not be possible.
-                throw new HyracksDataException("Found LSM files with overlapping but not contained timetamp intervals.");
+                throw new HyracksDataException(
+                        "Found LSM files with overlapping but not contained timetamp intervals.");
             }
         }
 
@@ -210,8 +210,8 @@
 
         String baseName = baseDir + ts + SPLIT_STRING + ts;
         return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING),
-                createFlushFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING), createFlushFile(baseName
-                        + SPLIT_STRING + BLOOM_FILTER_STRING));
+                createFlushFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING),
+                createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
@@ -230,8 +230,8 @@
             // get the actual transaction files
             files = dir.list(transactionFilter);
             if (files.length < 3) {
-                throw new HyracksDataException("LSM Btree with buddy transaction has less than 3 files :"
-                        + files.length);
+                throw new HyracksDataException(
+                        "LSM Btree with buddy transaction has less than 3 files :" + files.length);
             }
             try {
                 Files.delete(Paths.get(txnFileName));
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyMergeOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyMergeOperation.java
index 2810b62..0806834 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyMergeOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyMergeOperation.java
@@ -26,7 +26,6 @@
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -85,7 +84,7 @@
     }
 
     @Override
-    public Boolean call() throws HyracksDataException, IndexException {
+    public Boolean call() throws HyracksDataException {
         accessor.merge(this);
         return true;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySearchCursor.java
index a109f3f..768ef27 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySearchCursor.java
@@ -22,11 +22,10 @@
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
 
-public class LSMBTreeWithBuddySearchCursor extends LSMBTreeWithBuddyAbstractCursor{
+public class LSMBTreeWithBuddySearchCursor extends LSMBTreeWithBuddyAbstractCursor {
     private int currentCursor;
     private PermutingTupleReference buddyBTreeTuple;
 
@@ -64,17 +63,13 @@
 
     private void searchNextCursor() throws HyracksDataException {
         if (currentCursor < numberOfTrees) {
-            try {
-                btreeCursors[currentCursor].reset();
-                btreeAccessors[currentCursor].search(btreeCursors[currentCursor], btreeRangePredicate);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            btreeCursors[currentCursor].reset();
+            btreeAccessors[currentCursor].search(btreeCursors[currentCursor], btreeRangePredicate);
         }
     }
 
     @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
+    public boolean hasNext() throws HyracksDataException {
         if (foundNext) {
             return true;
         }
@@ -117,7 +112,7 @@
     }
 
     @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
         super.open(initialState, searchPred);
         searchNextCursor();
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySortedCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySortedCursor.java
index d045059..5e356c4 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySortedCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySortedCursor.java
@@ -21,12 +21,10 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
 
-public class LSMBTreeWithBuddySortedCursor extends
-        LSMBTreeWithBuddyAbstractCursor {
+public class LSMBTreeWithBuddySortedCursor extends LSMBTreeWithBuddyAbstractCursor {
     // TODO: This class can be removed and instead use a search cursor that uses
     // a logic similar
     // to the one in LSMRTreeWithAntiMatterTuplesSearchCursor
@@ -35,8 +33,8 @@
     private int foundIn = -1;
     private PermutingTupleReference buddyBtreeTuple;
 
-    public LSMBTreeWithBuddySortedCursor(ILSMIndexOperationContext opCtx,
-            int[] buddyBTreeFields) throws HyracksDataException {
+    public LSMBTreeWithBuddySortedCursor(ILSMIndexOperationContext opCtx, int[] buddyBTreeFields)
+            throws HyracksDataException {
         super(opCtx);
         this.buddyBtreeTuple = new PermutingTupleReference(buddyBTreeFields);
         reset();
@@ -53,12 +51,7 @@
         try {
             for (int i = 0; i < numberOfTrees; i++) {
                 btreeCursors[i].reset();
-                try {
-                    btreeAccessors[i].search(btreeCursors[i],
-                            btreeRangePredicate);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
+                btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
                 if (btreeCursors[i].hasNext()) {
                     btreeCursors[i].next();
                 } else {
@@ -67,9 +60,7 @@
             }
         } catch (Exception e) {
             e.printStackTrace();
-            throw new HyracksDataException(
-                    "error while reseting the btrees of the lsm btree with buddy btree",
-                    e);
+            throw new HyracksDataException("error while reseting the btrees of the lsm btree with buddy btree", e);
         } finally {
             if (open) {
                 lsmHarness.endSearch(opCtx);
@@ -78,7 +69,7 @@
     }
 
     @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
+    public boolean hasNext() throws HyracksDataException {
         while (!foundNext) {
             frameTuple = null;
 
@@ -92,8 +83,9 @@
 
             foundIn = -1;
             for (int i = 0; i < numberOfTrees; i++) {
-                if (depletedBtreeCursors[i])
+                if (depletedBtreeCursors[i]) {
                     continue;
+                }
 
                 if (frameTuple == null) {
                     frameTuple = btreeCursors[i].getTuple();
@@ -107,21 +99,17 @@
                 }
             }
 
-            if (foundIn == -1)
+            if (foundIn == -1) {
                 return false;
+            }
 
             boolean killed = false;
             buddyBtreeTuple.reset(frameTuple);
             for (int i = 0; i < foundIn; i++) {
-                try {
-                    buddyBtreeCursors[i].reset();
-                    buddyBtreeRangePredicate.setHighKey(buddyBtreeTuple, true);
-                    btreeRangePredicate.setLowKey(buddyBtreeTuple, true);
-                    btreeAccessors[i].search(btreeCursors[i],
-                            btreeRangePredicate);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
+                buddyBtreeCursors[i].reset();
+                buddyBtreeRangePredicate.setHighKey(buddyBtreeTuple, true);
+                btreeRangePredicate.setLowKey(buddyBtreeTuple, true);
+                btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
                 try {
                     if (btreeCursors[i].hasNext()) {
                         killed = true;
@@ -145,20 +133,14 @@
     }
 
     @Override
-    public void open(ICursorInitialState initialState,
-            ISearchPredicate searchPred) throws HyracksDataException,
-            IndexException {
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
         super.open(initialState, searchPred);
 
         depletedBtreeCursors = new boolean[numberOfTrees];
         foundNext = false;
         for (int i = 0; i < numberOfTrees; i++) {
             btreeCursors[i].reset();
-            try {
-                btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
             if (btreeCursors[i].hasNext()) {
                 btreeCursors[i].next();
             } else {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBuddyBTreeMergeCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBuddyBTreeMergeCursor.java
index 0496809..d60a620 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBuddyBTreeMergeCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBuddyBTreeMergeCursor.java
@@ -28,7 +28,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
@@ -41,15 +40,12 @@
     }
 
     @Override
-    protected boolean isDeleted(PriorityQueueElement checkElement)
-            throws HyracksDataException, IndexException {
+    protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException {
         return false;
     }
 
     @Override
-    public void open(ICursorInitialState initialState,
-            ISearchPredicate searchPred) throws IndexException,
-            HyracksDataException {
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
         LSMBTreeWithBuddyCursorInitialState lsmInitialState = (LSMBTreeWithBuddyCursorInitialState) initialState;
         cmp = lsmInitialState.getBuddyBTreeCmp();
         operationalComponents = lsmInitialState.getOperationalComponents();
@@ -60,19 +56,15 @@
         int numBTrees = operationalComponents.size();
         rangeCursors = new IIndexCursor[numBTrees];
 
-        RangePredicate btreePredicate = new RangePredicate(null, null, true,
-                true, cmp, cmp);
+        RangePredicate btreePredicate = new RangePredicate(null, null, true, true, cmp, cmp);
         IIndexAccessor[] btreeAccessors = new ITreeIndexAccessor[numBTrees];
         for (int i = 0; i < numBTrees; i++) {
             ILSMComponent component = operationalComponents.get(i);
-            IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) lsmInitialState
-                    .getBuddyBTreeLeafFrameFactory().createFrame();
+            IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) lsmInitialState.getBuddyBTreeLeafFrameFactory().createFrame();
             rangeCursors[i] = new BTreeRangeSearchCursor(leafFrame, false);
-            BTree buddyBtree = (BTree) ((LSMBTreeWithBuddyDiskComponent) component)
-                    .getBuddyBTree();
-            btreeAccessors[i] = buddyBtree.createAccessor(
-                    NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
+            BTree buddyBtree = ((LSMBTreeWithBuddyDiskComponent) component).getBuddyBTree();
+            btreeAccessors[i] =
+                    buddyBtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
             btreeAccessors[i].search(rangeCursors[i], btreePredicate);
         }
         setPriorityQueueComparator();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMHarness.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMHarness.java
index f21c8a3..298d75d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMHarness.java
@@ -25,7 +25,6 @@
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 
 public interface ILSMHarness {
 
@@ -39,7 +38,7 @@
      * @throws HyracksDataException
      * @throws IndexException
      */
-    void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple) throws HyracksDataException, IndexException;
+    void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Modify the index if the memory component is not full, wait for a new memory component if the current one is full
@@ -55,7 +54,7 @@
      * @throws IndexException
      */
     boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple)
-            throws HyracksDataException, IndexException;
+            throws HyracksDataException;
 
     /**
      * Search the index
@@ -70,7 +69,7 @@
      * @throws IndexException
      */
     void search(ILSMIndexOperationContext ctx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException;
+            throws HyracksDataException;
 
     /**
      * End the search
@@ -89,7 +88,7 @@
      * @throws IndexException
      */
     void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException;
+            throws HyracksDataException;
 
     /**
      * Schedule full merge
@@ -100,7 +99,7 @@
      * @throws IndexException
      */
     void scheduleFullMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException;
+            throws HyracksDataException;
 
     /**
      * Perform a merge operation
@@ -110,7 +109,7 @@
      * @throws HyracksDataException
      * @throws IndexException
      */
-    void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException, IndexException;
+    void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException;
 
     /**
      * Schedule a flush
@@ -129,7 +128,7 @@
      * @throws HyracksDataException
      * @throws IndexException
      */
-    void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException, IndexException;
+    void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException;
 
     /**
      * Add bulk loaded component
@@ -139,7 +138,7 @@
      * @throws HyracksDataException
      * @throws IndexException
      */
-    void addBulkLoadedComponent(ILSMDiskComponent index) throws HyracksDataException, IndexException;
+    void addBulkLoadedComponent(ILSMDiskComponent index) throws HyracksDataException;
 
     /**
      * Get index operation tracker
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIOOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIOOperation.java
index d768122..829523c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIOOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIOOperation.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.IODeviceHandle;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 
 public interface ILSMIOOperation extends Callable<Boolean> {
 
@@ -37,7 +36,7 @@
     Set<IODeviceHandle> getWriteDevices();
 
     @Override
-    Boolean call() throws HyracksDataException, IndexException;
+    Boolean call() throws HyracksDataException;
 
     ILSMIOOperationCallback getCallback();
 
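
Since ILSMIOOperation extends Callable<Boolean>, narrowing call() to HyracksDataException keeps flush and merge operations usable with any standard executor. An illustrative scheduling sketch (the executor and ioOperation variables are assumptions, not part of this change):

    // Sketch: an LSMBTreeFlushOperation or LSMBTreeMergeOperation is just a Callable.
    ExecutorService executor = Executors.newSingleThreadExecutor();
    Future<Boolean> done = executor.submit(ioOperation);
    try {
        done.get(); // call() itself now throws only HyracksDataException
    } catch (InterruptedException | ExecutionException e) {
        throw new HyracksDataException(e); // wrap, as the pre-change code did for IndexException
    }
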
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
index 234006f..344674f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
@@ -30,7 +30,6 @@
 import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.impls.LSMHarness;
 
 /**
@@ -62,19 +61,17 @@
 
     boolean isPrimaryIndex();
 
-    void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException, IndexException;
+    void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException;
 
-    void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException;
+    void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred) throws HyracksDataException;
 
     void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback) throws HyracksDataException;
 
-    ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException;
+    ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException;
 
-    void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException;
+    void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback) throws HyracksDataException;
 
-    ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException;
+    ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException;
 
     void addDiskComponent(ILSMDiskComponent index) throws HyracksDataException;
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexAccessor.java
index b0f366b..a04c54e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexAccessor.java
@@ -24,8 +24,6 @@
 import org.apache.hyracks.data.std.api.IValueReference;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 
 /**
  * Client handle for performing operations
@@ -55,7 +53,7 @@
      * @throws IndexException
      */
     void scheduleMerge(ILSMIOOperationCallback callback, List<ILSMDiskComponent> components)
-            throws HyracksDataException, IndexException;
+            throws HyracksDataException;
 
     /**
      * Schedule a full merge
@@ -65,7 +63,7 @@
      * @throws HyracksDataException
      * @throws IndexException
      */
-    void scheduleFullMerge(ILSMIOOperationCallback callback) throws HyracksDataException, IndexException;
+    void scheduleFullMerge(ILSMIOOperationCallback callback) throws HyracksDataException;
 
     /**
      * Delete the tuple from the memory component only. Don't replace with antimatter tuple
@@ -75,7 +73,7 @@
      * @throws HyracksDataException
      * @throws IndexException
      */
-    void physicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException;
+    void physicalDelete(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Attempts to insert the given tuple.
@@ -91,7 +89,7 @@
      *             If an index-specific constraint is violated, e.g., the key
      *             already exists.
      */
-    boolean tryInsert(ITupleReference tuple) throws HyracksDataException, IndexException;
+    boolean tryInsert(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Attempts to delete the given tuple.
@@ -106,7 +104,7 @@
      * @throws IndexException
      *             If there is no matching tuple in the index.
      */
-    boolean tryDelete(ITupleReference tuple) throws HyracksDataException, IndexException;
+    boolean tryDelete(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Attempts to update the given tuple.
@@ -122,7 +120,7 @@
      * @throws IndexException
      *             If there is no matching tuple in the index.
      */
-    boolean tryUpdate(ITupleReference tuple) throws HyracksDataException, IndexException;
+    boolean tryUpdate(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * This operation is only supported by indexes with the notion of a unique key.
@@ -139,7 +137,7 @@
      * @throws IndexException
      *             If there is no matching tuple in the index.
      */
-    boolean tryUpsert(ITupleReference tuple) throws HyracksDataException, IndexException;
+    boolean tryUpsert(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Delete the tuple from the memory component only. Don't replace with antimatter tuple
@@ -150,7 +148,7 @@
      * @throws HyracksDataException
      * @throws IndexException
      */
-    void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException;
+    void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Insert a new tuple (failing if duplicate key entry is found)
@@ -160,7 +158,7 @@
      * @throws HyracksDataException
      * @throws IndexException
      */
-    void forceInsert(ITupleReference tuple) throws HyracksDataException, IndexException;
+    void forceInsert(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Force deleting an index entry even if the memory component is full
@@ -171,7 +169,7 @@
      * @throws HyracksDataException
      * @throws IndexException
      */
-    void forceDelete(ITupleReference tuple) throws HyracksDataException, IndexException;
+    void forceDelete(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Force upserting the tuple into the memory component even if it is full
@@ -180,7 +178,7 @@
      * @throws HyracksDataException
      * @throws IndexException
      */
-    void forceUpsert(ITupleReference tuple) throws HyracksDataException, IndexException;
+    void forceUpsert(ITupleReference tuple) throws HyracksDataException;
 
     /**
      * Schedule a replication for disk components
@@ -202,7 +200,7 @@
      * @throws HyracksDataException
      * @throws TreeIndexException
      */
-    void flush(ILSMIOOperation operation) throws HyracksDataException, IndexException;
+    void flush(ILSMIOOperation operation) throws HyracksDataException;
 
     /**
      * Merge all on-disk components.
@@ -210,7 +208,7 @@
      * @throws HyracksDataException
      * @throws TreeIndexException
      */
-    void merge(ILSMIOOperation operation) throws HyracksDataException, IndexException;
+    void merge(ILSMIOOperation operation) throws HyracksDataException;
 
     /**
      * Update the metadata of the memory component, wait for the new component if the current one is UNWRITABLE
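
One point worth noting for the tryInsert/tryDelete/tryUpdate/tryUpsert family above: inability to proceed (e.g. the memory component is full) is reported by returning false, while constraint violations now arrive as HyracksDataException instead of IndexException. A plausible caller pattern, sketched:

    // Sketch: non-blocking attempt first, blocking insert as the fallback.
    if (!lsmAccessor.tryInsert(tuple)) {
        // tryInsert returned false without modifying the index; the blocking
        // variant waits for a writable memory component instead.
        lsmAccessor.insert(tuple);
    }
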
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexFileManager.java
index ce31e2e..54d64af 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexFileManager.java
@@ -24,7 +24,6 @@
 import java.util.List;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
 
 /**
@@ -48,7 +47,7 @@
 
     // Deletes invalid files, and returns list of valid files from baseDir.
     // The returned valid files are correctly sorted (based on the recency of data).
-    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException;
+    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException;
 
     public Comparator<String> getFileNameComparator();
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMMergePolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMMergePolicy.java
index c2bd45d..83ba5b7 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMMergePolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMMergePolicy.java
@@ -22,10 +22,9 @@
 import java.util.Map;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 
 public interface ILSMMergePolicy {
-    void diskComponentAdded(ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException, IndexException;
+    void diskComponentAdded(ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException;
 
     void configure(Map<String, String> properties);
 
@@ -54,5 +53,5 @@
      * @throws HyracksDataException
      * @throws IndexException
      */
-    boolean isMergeLagging(ILSMIndex index) throws HyracksDataException, IndexException;
+    boolean isMergeLagging(ILSMIndex index) throws HyracksDataException;
 }
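
For context, a merge policy is notified via diskComponentAdded and decides whether to schedule a merge through an ILSMIndexAccessor. A stripped-down, count-based sketch in the spirit of the ConstantMergePolicy touched below (the callback choice and threshold handling are simplified assumptions, not this commit's code):

    // Sketch: once numComponents disk components exist, schedule a full merge.
    @Override
    public void diskComponentAdded(ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException {
        List<ILSMDiskComponent> immutableComponents = index.getImmutableComponents();
        if (fullMergeIsRequested || immutableComponents.size() >= numComponents) {
            ILSMIndexAccessor accessor = (ILSMIndexAccessor) index.createAccessor(
                    NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
            accessor.scheduleFullMerge(NoOpIOOperationCallback.INSTANCE);
        }
    }
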
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ITwoPCIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ITwoPCIndex.java
index 65e91b2..5679dc9 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ITwoPCIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ITwoPCIndex.java
@@ -23,8 +23,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 
 // An interface containing the new methods introduced for bulk transactions
 public interface ITwoPCIndex {
@@ -33,28 +31,27 @@
      * and the bulk loaded component is hidden from the index
      */
     public IIndexBulkLoader createTransactionBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws TreeIndexException;
+            boolean checkIfEmptyIndex) throws HyracksDataException;
 
     /**
      * This function is used to commit the previous transaction if it was resulted in creating any components
      */
-    public void commitTransaction() throws TreeIndexException, HyracksDataException, IndexException;
+    public void commitTransaction() throws HyracksDataException;
 
     /**
      * This function is used to abort the last transaction
      */
-    public void abortTransaction() throws TreeIndexException;
+    public void abortTransaction() throws HyracksDataException;
 
     /**
      * This function is used to recover a transaction if the system crashed after the decision to commit
      */
-    public void recoverTransaction() throws TreeIndexException;
+    public void recoverTransaction() throws HyracksDataException;
 
     /**
      * This function is used to add the committed disk component to the appropriate list and reflect the changes
      */
-    public void commitTransactionDiskComponent(ILSMDiskComponent newComponent)
-            throws IndexException, HyracksDataException;
+    public void commitTransactionDiskComponent(ILSMDiskComponent newComponent) throws HyracksDataException;
 
     /**
      * This function is used to create a version specific accessor to search a specific version
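
Taken together, the ITwoPCIndex methods above define a two-phase bulk transaction: load into a hidden component, then commit or abort it as a unit. A sketch of the end-to-end flow implied by the interface (the tuple source, fill factor, and hint values are illustrative):

    // Sketch of the 2PC bulk-load flow; every step now fails with HyracksDataException.
    IIndexBulkLoader loader = twoPCIndex.createTransactionBulkLoader(0.7f, true, numTuples, true);
    try {
        while (tuples.hasNext()) {
            loader.add(tuples.next());
        }
        loader.end();                   // seals the hidden, uncommitted component
        twoPCIndex.commitTransaction(); // makes the component visible
    } catch (HyracksDataException e) {
        twoPCIndex.abortTransaction();  // discards the hidden component
        throw e;
    }
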
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
index 36c08668..fca010f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
@@ -41,7 +41,6 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrame;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
 import org.apache.hyracks.storage.common.buffercache.ICachedPage;
@@ -121,7 +120,7 @@
 
     protected void cleanupAndGetValidFilesInternal(FilenameFilter filter,
             TreeIndexFactory<? extends ITreeIndex> treeFactory, ArrayList<ComparableFileName> allFiles)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         String[] files = listDirFiles(baseDir, filter);
         File dir = new File(baseDir);
         for (String fileName : files) {
@@ -159,8 +158,7 @@
     }
 
     protected void validateFiles(HashSet<String> groundTruth, ArrayList<ComparableFileName> validFiles,
-            FilenameFilter filter, TreeIndexFactory<? extends ITreeIndex> treeFactory)
-            throws HyracksDataException, IndexException {
+            FilenameFilter filter, TreeIndexFactory<? extends ITreeIndex> treeFactory) throws HyracksDataException {
         ArrayList<ComparableFileName> tmpAllInvListsFiles = new ArrayList<>();
         cleanupAndGetValidFilesInternal(filter, treeFactory, tmpAllInvListsFiles);
         for (ComparableFileName cmpFileName : tmpAllInvListsFiles) {
@@ -228,7 +226,7 @@
     }
 
     @Override
-    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
+    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException {
         List<LSMComponentFileReferences> validFiles = new ArrayList<>();
         ArrayList<ComparableFileName> allFiles = new ArrayList<>();
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMMemoryComponent.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMMemoryComponent.java
index bfe7fc0..7cbe35f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMMemoryComponent.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMMemoryComponent.java
@@ -196,6 +196,9 @@
     public void reset() throws HyracksDataException {
         isModified.set(false);
         metadata.reset();
+        if (filter != null) {
+            filter.reset();
+        }
     }
 
     @Override
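
The new guard exists because LSM memory components are recycled rather than reallocated: without it, a recycled component would keep the previous cycle's filter bounds (the min/max values used to prune component scans) and answer probes with stale ranges. The invariant, restated as an annotated sketch of the patched method:

    public void reset() throws HyracksDataException {
        isModified.set(false); // the recycled component holds no unflushed changes
        metadata.reset();      // drop per-component metadata
        if (filter != null) {  // component filters are optional, so guard the reset
            filter.reset();    // clear stale min/max bounds from the previous cycle
        }
    }
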
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ConstantMergePolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ConstantMergePolicy.java
index b3e1f6f..767699d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ConstantMergePolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ConstantMergePolicy.java
@@ -23,11 +23,10 @@
 import java.util.Map;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.ComponentState;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
@@ -36,8 +35,7 @@
     private int numComponents;
 
     @Override
-    public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested)
-            throws HyracksDataException, IndexException {
+    public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException {
         List<ILSMDiskComponent> immutableComponents = index.getImmutableComponents();
 
         if (!areComponentsMergable(immutableComponents)) {
@@ -61,7 +59,7 @@
     }
 
     @Override
-    public boolean isMergeLagging(ILSMIndex index) throws HyracksDataException, IndexException {
+    public boolean isMergeLagging(ILSMIndex index) throws HyracksDataException {
         // see PrefixMergePolicy.isMergeLagging() for the rationale behind this code.
 
         /**
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
index c6346cc..e6c8186 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
@@ -22,15 +22,15 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.replication.IReplicationJob.ReplicationOperation;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
@@ -114,7 +114,7 @@
     }
 
     private void exitComponents(ILSMIndexOperationContext ctx, LSMOperationType opType, ILSMDiskComponent newComponent,
-            boolean failedOperation) throws HyracksDataException, IndexException {
+            boolean failedOperation) throws HyracksDataException {
         /**
          * FLUSH and MERGE operations should always exit the components
          * to notify waiting threads.
@@ -168,25 +168,24 @@
     }
 
     @Override
-    public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple)
-            throws HyracksDataException, IndexException {
-        throw new IndexException("2PC LSM Inedx doesn't support modify");
+    public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple) throws HyracksDataException {
+        throw HyracksDataException.create(ErrorCode.MODIFY_NOT_SUPPORTED_IN_EXTERNAL_INDEX);
     }
 
     @Override
     public boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple)
-            throws HyracksDataException, IndexException {
-        throw new IndexException("2PC LSM Inedx doesn't support modify");
+            throws HyracksDataException {
+        throw HyracksDataException.create(ErrorCode.MODIFY_NOT_SUPPORTED_IN_EXTERNAL_INDEX);
     }
 
     @Override
     public void search(ILSMIndexOperationContext ctx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         LSMOperationType opType = LSMOperationType.SEARCH;
         getAndEnterComponents(ctx, opType, false);
         try {
             lsmIndex.search(ctx, cursor, pred);
-        } catch (HyracksDataException | IndexException e) {
+        } catch (Exception e) {
             exitComponents(ctx, opType, null, true);
             throw e;
         }
@@ -197,7 +196,7 @@
         if (ctx.getOperation() == IndexOperation.SEARCH) {
             try {
                 exitComponents(ctx, LSMOperationType.SEARCH, null, false);
-            } catch (IndexException e) {
+            } catch (Exception e) {
                 throw new HyracksDataException(e);
             }
         }
@@ -205,7 +204,7 @@
 
     @Override
     public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         if (!getAndEnterComponents(ctx, LSMOperationType.MERGE, true)) {
             callback.afterFinalize(LSMOperationType.MERGE, null);
             return;
@@ -215,7 +214,7 @@
 
     @Override
     public void scheduleFullMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         fullMergeIsRequested.set(true);
         if (!getAndEnterComponents(ctx, LSMOperationType.MERGE, true)) {
             // If the merge cannot be scheduled because there is already an ongoing merge on subset/all of the components, then
@@ -228,8 +227,7 @@
     }
 
     @Override
-    public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
-            throws HyracksDataException, IndexException {
+    public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Started a merge operation for index: " + lsmIndex + " ...");
         }
@@ -249,7 +247,7 @@
     }
 
     @Override
-    public void addBulkLoadedComponent(ILSMDiskComponent c) throws HyracksDataException, IndexException {
+    public void addBulkLoadedComponent(ILSMDiskComponent c) throws HyracksDataException {
         lsmIndex.markAsValid(c);
         synchronized (opTracker) {
             lsmIndex.addDiskComponent(c);
@@ -268,7 +266,7 @@
     // 1. this needs synchronization since others might be accessing the index (specifically merge operations that might change the lists of components)
     // 2. the actions taken by the index itself are different
     // 3. the component has already been marked valid by the bulk update operation
-    public void addTransactionComponents(ILSMDiskComponent newComponent) throws HyracksDataException, IndexException {
+    public void addTransactionComponents(ILSMDiskComponent newComponent) throws HyracksDataException {
         ITwoPCIndex index = (ITwoPCIndex) lsmIndex;
         synchronized (opTracker) {
             List<ILSMDiskComponent> newerList;
@@ -305,8 +303,7 @@
     }
 
     @Override
-    public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
-            throws HyracksDataException, IndexException {
+    public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
     }
 
     @Override
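
Swapping message-bearing IndexExceptions for HyracksDataException.create(ErrorCode...) also changes what the catch side can do: callers can branch on a stable error code rather than on an exception subtype or message text. A sketch of that pattern using the patch's own constant (the helper and its name are illustrative):

    import org.apache.hyracks.api.exceptions.ErrorCode;
    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
    import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
    import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;

    final class ErrorCodeDispatchSketch {
        // Returns false instead of failing when the target is an external (2PC) index.
        static boolean tryModify(ILSMHarness harness, ILSMIndexOperationContext ctx, ITupleReference tuple)
                throws HyracksDataException {
            try {
                return harness.modify(ctx, true, tuple);
            } catch (HyracksDataException e) {
                if (e.getErrorCode() == ErrorCode.MODIFY_NOT_SUPPORTED_IN_EXTERNAL_INDEX) {
                    return false; // external indexes reject direct modification by design
                }
                throw e; // anything else is a genuine failure
            }
        }
    }
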
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
index 01e85d7..494ba27 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
@@ -32,7 +32,6 @@
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
@@ -189,7 +188,7 @@
     }
 
     private void exitComponents(ILSMIndexOperationContext ctx, LSMOperationType opType, ILSMDiskComponent newComponent,
-            boolean failedOperation) throws HyracksDataException, IndexException {
+            boolean failedOperation) throws HyracksDataException {
         /**
          * FLUSH and MERGE operations should always exit the components
          * to notify waiting threads.
@@ -348,15 +347,14 @@
     }
 
     @Override
-    public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple)
-            throws HyracksDataException, IndexException {
+    public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple) throws HyracksDataException {
         LSMOperationType opType = LSMOperationType.FORCE_MODIFICATION;
         modify(ctx, false, tuple, opType);
     }
 
     @Override
     public boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         LSMOperationType opType = LSMOperationType.MODIFICATION;
         return modify(ctx, tryOperation, tuple, opType);
     }
@@ -378,8 +376,6 @@
     private void exitAndComplete(ILSMIndexOperationContext ctx, LSMOperationType op) throws HyracksDataException {
         try {
             exitComponents(ctx, op, null, false);
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
         } finally {
             opTracker.completeOperation(null, op, null, ctx.getModificationCallback());
         }
@@ -400,7 +396,7 @@
     }
 
     private boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple,
-            LSMOperationType opType) throws HyracksDataException, IndexException {
+            LSMOperationType opType) throws HyracksDataException {
         if (!lsmIndex.isMemoryComponentsAllocated()) {
             lsmIndex.allocateMemoryComponents();
         }
@@ -424,14 +420,14 @@
 
     @Override
     public void search(ILSMIndexOperationContext ctx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         LSMOperationType opType = LSMOperationType.SEARCH;
         ctx.setSearchPredicate(pred);
         getAndEnterComponents(ctx, opType, false);
         try {
             ctx.getSearchOperationCallback().before(pred.getLowKey());
             lsmIndex.search(ctx, cursor, pred);
-        } catch (HyracksDataException | IndexException e) {
+        } catch (Exception e) {
             exitComponents(ctx, opType, null, true);
             throw e;
         }
@@ -442,7 +438,7 @@
         if (ctx.getOperation() == IndexOperation.SEARCH) {
             try {
                 exitComponents(ctx, LSMOperationType.SEARCH, null, false);
-            } catch (IndexException e) {
+            } catch (Exception e) {
                 throw new HyracksDataException(e);
             }
         }
@@ -459,8 +455,7 @@
     }
 
     @Override
-    public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
-            throws HyracksDataException, IndexException {
+    public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Started a flush operation for index: " + lsmIndex + " ...");
         }
@@ -484,7 +479,7 @@
 
     @Override
     public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         if (!getAndEnterComponents(ctx, LSMOperationType.MERGE, true)) {
             callback.afterFinalize(LSMOperationType.MERGE, null);
             return;
@@ -494,7 +489,7 @@
 
     @Override
     public void scheduleFullMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         fullMergeIsRequested.set(true);
         if (!getAndEnterComponents(ctx, LSMOperationType.MERGE, true)) {
             // If the merge cannot be scheduled because there is already an ongoing merge on subset/all of the components, then
@@ -507,8 +502,7 @@
     }
 
     @Override
-    public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
-            throws HyracksDataException, IndexException {
+    public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Started a merge operation for index: " + lsmIndex + " ...");
         }
@@ -531,7 +525,7 @@
     }
 
     @Override
-    public void addBulkLoadedComponent(ILSMDiskComponent c) throws HyracksDataException, IndexException {
+    public void addBulkLoadedComponent(ILSMDiskComponent c) throws HyracksDataException {
         lsmIndex.markAsValid(c);
         synchronized (opTracker) {
             lsmIndex.addDiskComponent(c);
@@ -570,11 +564,7 @@
 
     @Override
     public void endReplication(ILSMIndexOperationContext ctx) throws HyracksDataException {
-        try {
-            exitComponents(ctx, LSMOperationType.REPLICATE, null, false);
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
+        exitComponents(ctx, LSMOperationType.REPLICATE, null, false);
     }
 
     protected void validateOperationEnterComponentsState(ILSMIndexOperationContext ctx) throws HyracksDataException {
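
A subtlety in the widened catch blocks above: catch (Exception e) followed by throw e still compiles against a method that declares only throws HyracksDataException. Java 7's precise-rethrow analysis observes that the try body can emit no checked type other than HyracksDataException, so the broader catch variable needs no broader throws clause. A standalone illustration (names are illustrative):

    import org.apache.hyracks.api.exceptions.HyracksDataException;

    final class PreciseRethrowSketch {
        interface Op {
            void run() throws HyracksDataException;
        }

        static void guarded(Op op, Runnable cleanup) throws HyracksDataException {
            try {
                op.run();
            } catch (Exception e) { // catches HyracksDataException plus unchecked types
                cleanup.run();      // e.g. exitComponents(...) in the harness
                throw e;            // precise rethrow: no `throws Exception` needed
            }
        }
    }
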
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
index befdd85..4c3a577 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
@@ -27,7 +27,6 @@
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
@@ -62,10 +61,10 @@
         return opCtx;
     }
 
-    public void initPriorityQueue() throws HyracksDataException, IndexException {
+    public void initPriorityQueue() throws HyracksDataException {
         int pqInitSize = (rangeCursors.length > 0) ? rangeCursors.length : 1;
         if (outputPriorityQueue == null) {
-            outputPriorityQueue = new PriorityQueue<PriorityQueueElement>(pqInitSize, pqCmp);
+            outputPriorityQueue = new PriorityQueue<>(pqInitSize, pqCmp);
             pqes = new PriorityQueueElement[pqInitSize];
             for (int i = 0; i < pqInitSize; i++) {
                 pqes[i] = new PriorityQueueElement(i);
@@ -98,7 +97,7 @@
     }
 
     @Override
-    public void reset() throws HyracksDataException, IndexException {
+    public void reset() throws HyracksDataException {
         outputElement = null;
         needPush = false;
 
@@ -121,7 +120,7 @@
     }
 
     @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
+    public boolean hasNext() throws HyracksDataException {
         checkPriorityQueue();
         return !outputPriorityQueue.isEmpty();
     }
@@ -170,7 +169,7 @@
         return outputElement.getTuple();
     }
 
-    protected boolean pushIntoPriorityQueue(PriorityQueueElement e) throws HyracksDataException, IndexException {
+    protected boolean pushIntoPriorityQueue(PriorityQueueElement e) throws HyracksDataException {
         int cursorIndex = e.getCursorIndex();
         if (rangeCursors[cursorIndex].hasNext()) {
             rangeCursors[cursorIndex].next();
@@ -182,11 +181,11 @@
         return false;
     }
 
-    protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException, IndexException {
+    protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException {
         return ((ILSMTreeTupleReference) checkElement.getTuple()).isAntimatter();
     }
 
-    protected void checkPriorityQueue() throws HyracksDataException, IndexException {
+    protected void checkPriorityQueue() throws HyracksDataException {
-        while (!outputPriorityQueue.isEmpty() || (needPush == true)) {
+        while (!outputPriorityQueue.isEmpty() || needPush) {
             if (!outputPriorityQueue.isEmpty()) {
                 PriorityQueueElement checkElement = outputPriorityQueue.peek();
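
With the cleanup, the merged-cursor protocol is uniform with the rest of the storage layer: hasNext() pulls the next winner off the priority queue and next() advances past it, all under one checked type. The consumption loop a caller runs against an already-opened cursor, sketched:

    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
    import org.apache.hyracks.storage.am.common.api.IIndexCursor;

    final class CursorScanSketch {
        static int drain(IIndexCursor cursor) throws HyracksDataException {
            int count = 0;
            try {
                while (cursor.hasNext()) {
                    cursor.next();
                    ITupleReference tuple = cursor.getTuple(); // consume the tuple here
                    count++;
                }
            } finally {
                cursor.close(); // releases the per-component range cursors
            }
            return count;
        }
    }
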
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMTreeIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMTreeIndexAccessor.java
index 1f93fc5..8293f4c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMTreeIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMTreeIndexAccessor.java
@@ -26,7 +26,6 @@
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
@@ -46,74 +45,74 @@
     }
 
     @Override
-    public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void insert(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.INSERT);
         lsmHarness.modify(ctx, false, tuple);
     }
 
     @Override
-    public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void update(ITupleReference tuple) throws HyracksDataException {
         // Update is the same as insert.
         ctx.setOperation(IndexOperation.UPDATE);
         lsmHarness.modify(ctx, false, tuple);
     }
 
     @Override
-    public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void delete(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.DELETE);
         lsmHarness.modify(ctx, false, tuple);
     }
 
     @Override
-    public void upsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void upsert(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.UPSERT);
         lsmHarness.modify(ctx, false, tuple);
     }
 
     @Override
-    public boolean tryInsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public boolean tryInsert(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.INSERT);
         return lsmHarness.modify(ctx, true, tuple);
     }
 
     @Override
-    public boolean tryDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public boolean tryDelete(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.DELETE);
         return lsmHarness.modify(ctx, true, tuple);
     }
 
     @Override
-    public boolean tryUpdate(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public boolean tryUpdate(ITupleReference tuple) throws HyracksDataException {
         // Update is the same as insert.
         ctx.setOperation(IndexOperation.UPDATE);
         return lsmHarness.modify(ctx, true, tuple);
     }
 
     @Override
-    public boolean tryUpsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public boolean tryUpsert(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.UPSERT);
         return lsmHarness.modify(ctx, true, tuple);
     }
 
     @Override
-    public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
+    public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
         ctx.setOperation(IndexOperation.SEARCH);
         lsmHarness.search(ctx, cursor, searchPred);
     }
 
     @Override
-    public void flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public void flush(ILSMIOOperation operation) throws HyracksDataException {
         lsmHarness.flush(ctx, operation);
     }
 
     @Override
-    public void merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public void merge(ILSMIOOperation operation) throws HyracksDataException {
         ctx.setOperation(IndexOperation.MERGE);
         lsmHarness.merge(ctx, operation);
     }
 
     @Override
-    public void physicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void physicalDelete(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.PHYSICALDELETE);
         lsmHarness.modify(ctx, false, tuple);
     }
@@ -126,7 +125,7 @@
 
     @Override
     public void scheduleMerge(ILSMIOOperationCallback callback, List<ILSMDiskComponent> components)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         ctx.setOperation(IndexOperation.MERGE);
         ctx.getComponentsToBeMerged().clear();
         ctx.getComponentsToBeMerged().addAll(components);
@@ -143,40 +142,40 @@
     }
 
     @Override
-    public void scheduleFullMerge(ILSMIOOperationCallback callback) throws HyracksDataException, IndexException {
+    public void scheduleFullMerge(ILSMIOOperationCallback callback) throws HyracksDataException {
         ctx.setOperation(IndexOperation.FULL_MERGE);
         lsmHarness.scheduleFullMerge(ctx, callback);
     }
 
     @Override
-    public void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.PHYSICALDELETE);
         lsmHarness.forceModify(ctx, tuple);
     }
 
     @Override
-    public void forceInsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void forceInsert(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.INSERT);
         lsmHarness.forceModify(ctx, tuple);
     }
 
     @Override
-    public void forceUpsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void forceUpsert(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.UPSERT);
         lsmHarness.forceModify(ctx, tuple);
     }
 
     @Override
-    public void forceDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void forceDelete(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.DELETE);
         lsmHarness.forceModify(ctx, tuple);
     }
 
     @Override
     public void updateMeta(IValueReference key, IValueReference value) throws HyracksDataException {
-     // a hack because delete only gets the memory component
+        // a hack because delete only gets the memory component
         ctx.setOperation(IndexOperation.DELETE);
-        lsmHarness.updateMeta(ctx,key,value);
+        lsmHarness.updateMeta(ctx, key, value);
     }
 
     @Override
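
Taken together, the accessor now presents one checked exception across modifications, searches, and maintenance operations. An end-to-end sketch (accessor construction and the predicate are assumed to come from elsewhere):

    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
    import org.apache.hyracks.storage.am.common.api.IIndexCursor;
    import org.apache.hyracks.storage.am.common.api.ISearchPredicate;

    final class AccessorRoundTripSketch {
        static void upsertThenScan(LSMTreeIndexAccessor accessor, ITupleReference tuple, ISearchPredicate pred)
                throws HyracksDataException {
            accessor.upsert(tuple); // one throws clause covers the whole method
            IIndexCursor cursor = accessor.createSearchCursor(false); // shared (non-exclusive) cursor
            try {
                accessor.search(cursor, pred);
                while (cursor.hasNext()) {
                    cursor.next(); // visit each match
                }
            } finally {
                cursor.close();
            }
        }
    }
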
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/NoMergePolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/NoMergePolicy.java
index 86be9c8..94587b8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/NoMergePolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/NoMergePolicy.java
@@ -21,15 +21,13 @@
 import java.util.Map;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
 
 public class NoMergePolicy implements ILSMMergePolicy {
 
     @Override
-    public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested)
-            throws HyracksDataException, IndexException {
+    public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException {
         // Do nothing
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/PrefixMergePolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/PrefixMergePolicy.java
index 5f36339..23646b9 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/PrefixMergePolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/PrefixMergePolicy.java
@@ -25,11 +25,10 @@
 import java.util.Map;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.ComponentState;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
@@ -40,8 +39,7 @@
     private int maxToleranceComponentCount;
 
     @Override
-    public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested)
-            throws HyracksDataException, IndexException {
+    public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException {
 
         ArrayList<ILSMDiskComponent> immutableComponents = new ArrayList<>(index.getImmutableComponents());
 
@@ -66,7 +64,7 @@
     }
 
     @Override
-    public boolean isMergeLagging(ILSMIndex index) throws HyracksDataException, IndexException {
+    public boolean isMergeLagging(ILSMIndex index) throws HyracksDataException {
 
         /**
          * [for flow-control purpose]
@@ -221,7 +219,6 @@
      * @throws HyracksDataException
-     * @throws IndexException
      */
-    private boolean scheduleMerge(final ILSMIndex index) throws HyracksDataException, IndexException {
+    private boolean scheduleMerge(final ILSMIndex index) throws HyracksDataException {
         // 1.  Look at the candidate components for merging in oldest-first order.  If one exists, identify the prefix of the sequence of
         // all such components for which the sum of their sizes exceeds MaxMrgCompSz.  Schedule a merge of those components into a new component.
         // 2.  If a merge from 1 doesn't happen, see if the set of candidate components for merging exceeds MaxTolCompCnt.  If so, schedule
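
Merge policies now reduce to the scheduling decision itself. A sketch of a minimal count-threshold policy under the new signatures; it assumes the interface's configure(Map) member and the NoOp callback singletons as used by the shipped policies, and the property key is hypothetical:

    import java.util.Map;
    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
    import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
    import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
    import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
    import org.apache.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;

    public class CountThresholdMergePolicy implements ILSMMergePolicy {
        private int maxComponents;

        @Override
        public void configure(Map<String, String> properties) {
            maxComponents = Integer.parseInt(properties.get("max-components")); // hypothetical key
        }

        @Override
        public void diskComponentAdded(ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException {
            if (!fullMergeIsRequested && index.getImmutableComponents().size() <= maxComponents) {
                return; // under threshold and no full merge requested: nothing to schedule
            }
            ILSMIndexAccessor accessor = (ILSMIndexAccessor) index.createAccessor(NoOpOperationCallback.INSTANCE,
                    NoOpOperationCallback.INSTANCE);
            accessor.scheduleFullMerge(NoOpIOOperationCallback.INSTANCE);
        }

        @Override
        public boolean isMergeLagging(ILSMIndex index) throws HyracksDataException {
            return false; // this sketch applies no merge flow control
        }
    }
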
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndex.java
index 449c8f9..3a368d4 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndex.java
@@ -25,13 +25,12 @@
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 
 public interface IInvertedIndex extends IIndex {
     IInvertedListCursor createInvertedListCursor();
 
     void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey, IIndexOperationContext ictx)
-            throws HyracksDataException, IndexException;
+            throws HyracksDataException;
 
     ITypeTraits[] getInvListTypeTraits();
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexAccessor.java
index 0d15986..f78eda9 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexAccessor.java
@@ -24,16 +24,14 @@
 import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 
 public interface IInvertedIndexAccessor extends IIndexAccessor {
     public IInvertedListCursor createInvertedListCursor();
 
     public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey)
-            throws HyracksDataException, IndexException;
+            throws HyracksDataException;
 
     public IIndexCursor createRangeSearchCursor();
 
-    public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws IndexException,
-            HyracksDataException;
+    public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException;
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearcher.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearcher.java
index c4269272..17e8ad2 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearcher.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearcher.java
@@ -26,13 +26,12 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexSearchCursor;
 import org.apache.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
 
 public interface IInvertedIndexSearcher {
-    public void search(OnDiskInvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred, IIndexOperationContext ictx)
-            throws HyracksDataException, IndexException;
+    public void search(OnDiskInvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred,
+            IIndexOperationContext ictx) throws HyracksDataException;
 
     public IFrameTupleAccessor createResultFrameTupleAccessor();
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedListCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedListCursor.java
index 9ec22d6..6ab6933 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedListCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedListCursor.java
@@ -22,17 +22,16 @@
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 
 public interface IInvertedListCursor extends Comparable<IInvertedListCursor> {
     public void reset(int startPageId, int endPageId, int startOff, int numElements);
 
-    public void pinPages() throws HyracksDataException, IndexException;
+    public void pinPages() throws HyracksDataException;
 
     public void unpinPages() throws HyracksDataException;
 
-    public boolean hasNext() throws HyracksDataException, IndexException;
+    public boolean hasNext() throws HyracksDataException;
 
     public void next() throws HyracksDataException;
 
@@ -47,11 +46,11 @@
 
     public int getStartOff();
 
-    public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) throws HyracksDataException, IndexException;
+    public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) throws HyracksDataException;
 
     // for debugging
     @SuppressWarnings("rawtypes")
-    public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException, IndexException;
+    public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException;
 
     @SuppressWarnings("rawtypes")
     public String printCurrentElement(ISerializerDeserializer[] serdes) throws HyracksDataException;
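
The cursor keeps its pin/iterate/unpin protocol; only the throws clauses shrink. The discipline a consumer follows against an already-positioned cursor, sketched with an illustrative counting helper:

    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;

    final class InvertedListScanSketch {
        static int countElements(IInvertedListCursor cursor) throws HyracksDataException {
            cursor.pinPages();       // bring the list's pages into the buffer cache
            try {
                int n = 0;
                while (cursor.hasNext()) {
                    cursor.next();   // examine the current element here if needed
                    n++;
                }
                return n;
            } finally {
                cursor.unpinPages(); // always release the pinned pages
            }
        }
    }
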
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IPartitionedInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IPartitionedInvertedIndex.java
index b7bada3..df8e6f0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IPartitionedInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IPartitionedInvertedIndex.java
@@ -19,17 +19,16 @@
 
 package org.apache.hyracks.storage.am.lsm.invertedindex.api;
 
-import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.invertedindex.search.InvertedListPartitions;
 
 public interface IPartitionedInvertedIndex {
     public boolean openInvertedListPartitionCursors(IInvertedIndexSearcher searcher, IIndexOperationContext ictx,
             short numTokensLowerBound, short numTokensUpperBound, InvertedListPartitions invListPartitions,
-            ArrayList<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException, IndexException;
+            List<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException;
 
     public boolean isEmpty();
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java
index fe5e94e..95010cc 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java
@@ -27,7 +27,6 @@
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.util.IndexFileNameUtil;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
@@ -78,25 +77,19 @@
     @Override
     public IIndex createIndexInstance() throws HyracksDataException {
         IInvertedIndexOperatorDescriptor invIndexOpDesc = (IInvertedIndexOperatorDescriptor) opDesc;
-        try {
-            IBufferCache diskBufferCache = opDesc.getStorageManager().getBufferCache(ctx);
-            IFileMapProvider diskFileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
-            FileReference fileRef = IndexFileNameUtil.getIndexAbsoluteFileRef(invIndexOpDesc, ctx.getTaskAttemptId()
-                    .getTaskId().getPartition(), ctx.getIOManager());
-            LSMInvertedIndex invIndex = InvertedIndexUtils.createLSMInvertedIndex(ctx.getIOManager(),
-                    virtualBufferCaches,
-                    diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(),
-                    invIndexOpDesc.getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(),
-                    invIndexOpDesc.getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(),
-                    diskBufferCache, fileRef.getFile().getAbsolutePath(), bloomFilterFalsePositiveRate, mergePolicy,
-                    opTrackerFactory.getOperationTracker(ctx.getJobletContext().getServiceContext()), ioScheduler,
-                    ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits,
-                    filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
-                    invertedIndexFieldsForNonBulkLoadOps, durable, (IMetadataPageManagerFactory) opDesc
-                            .getPageManagerFactory());
-            return invIndex;
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
+        IBufferCache diskBufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+        IFileMapProvider diskFileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
+        FileReference fileRef = IndexFileNameUtil.getIndexAbsoluteFileRef(invIndexOpDesc,
+                ctx.getTaskAttemptId().getTaskId().getPartition(), ctx.getIOManager());
+        LSMInvertedIndex invIndex = InvertedIndexUtils.createLSMInvertedIndex(ctx.getIOManager(), virtualBufferCaches,
+                diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(),
+                invIndexOpDesc.getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(),
+                invIndexOpDesc.getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(), diskBufferCache,
+                fileRef.getFile().getAbsolutePath(), bloomFilterFalsePositiveRate, mergePolicy,
+                opTrackerFactory.getOperationTracker(ctx.getJobletContext().getServiceContext()), ioScheduler,
+                ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits, filterCmpFactories,
+                filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, durable,
+                (IMetadataPageManagerFactory) opDesc.getPageManagerFactory());
+        return invIndex;
     }
 }
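
The deleted try/catch is the mechanical half of the whole patch: layers that used to unwrap IndexException and re-wrap it as HyracksDataException, burying the original failure one cause deeper, can now pass exceptions straight through. The surviving shape, generalized as a sketch (the factory interface and its names are illustrative):

    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.storage.am.common.api.IIndex;

    final class PassThroughSketch {
        interface IndexFactory {
            IIndex create() throws HyracksDataException;
        }

        // No catch/re-wrap layer: the factory's exception is the caller's exception.
        static IIndex createIndexInstance(IndexFactory factory) throws HyracksDataException {
            return factory.create();
        }
    }
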
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java
index 34c5810..07ca516 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java
@@ -26,7 +26,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.util.IndexFileNameUtil;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
@@ -77,24 +76,19 @@
     @Override
     public IIndex createIndexInstance() throws HyracksDataException {
         IInvertedIndexOperatorDescriptor invIndexOpDesc = (IInvertedIndexOperatorDescriptor) opDesc;
-        try {
-            IBufferCache diskBufferCache = opDesc.getStorageManager().getBufferCache(ctx);
-            IFileMapProvider diskFileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
-            FileReference fileRef = IndexFileNameUtil.getIndexAbsoluteFileRef(invIndexOpDesc, ctx.getTaskAttemptId()
-                    .getTaskId().getPartition(), ctx.getIOManager());
-            PartitionedLSMInvertedIndex invIndex = InvertedIndexUtils.createPartitionedLSMInvertedIndex(ctx
-                    .getIOManager(),
-                    virtualBufferCaches, diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(),
-                    invIndexOpDesc.getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(),
-                    invIndexOpDesc.getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(),
-                    diskBufferCache, fileRef.getFile().getAbsolutePath(), bloomFilterFalsePositiveRate, mergePolicy,
-                    opTrackerFactory.getOperationTracker(ctx.getJobletContext().getServiceContext()), ioScheduler,
-                    ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits,
-                    filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
-                    invertedIndexFieldsForNonBulkLoadOps, durable, opDesc.getPageManagerFactory());
-            return invIndex;
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
+        IBufferCache diskBufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+        IFileMapProvider diskFileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
+        FileReference fileRef = IndexFileNameUtil.getIndexAbsoluteFileRef(invIndexOpDesc,
+                ctx.getTaskAttemptId().getTaskId().getPartition(), ctx.getIOManager());
+        PartitionedLSMInvertedIndex invIndex = InvertedIndexUtils.createPartitionedLSMInvertedIndex(ctx.getIOManager(),
+                virtualBufferCaches, diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(),
+                invIndexOpDesc.getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(),
+                invIndexOpDesc.getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(), diskBufferCache,
+                fileRef.getFile().getAbsolutePath(), bloomFilterFalsePositiveRate, mergePolicy,
+                opTrackerFactory.getOperationTracker(ctx.getJobletContext().getServiceContext()), ioScheduler,
+                ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits, filterCmpFactories,
+                filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, durable,
+                opDesc.getPageManagerFactory());
+        return invIndex;
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/exceptions/InvertedIndexException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/exceptions/InvertedIndexException.java
deleted file mode 100644
index 14187f6..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/exceptions/InvertedIndexException.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.lsm.invertedindex.exceptions;
-
-import org.apache.hyracks.storage.am.common.api.IndexException;
-
-public class InvertedIndexException extends IndexException {
-    private static final long serialVersionUID = 1L;
-
-    public InvertedIndexException(Exception e) {
-        super(e);
-    }
-
-    public InvertedIndexException(String msg) {
-        super(msg);
-    }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/exceptions/OccurrenceThresholdPanicException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/exceptions/OccurrenceThresholdPanicException.java
deleted file mode 100644
index 86a0287..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/exceptions/OccurrenceThresholdPanicException.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.lsm.invertedindex.exceptions;
-
-
-
-public class OccurrenceThresholdPanicException extends InvertedIndexException {
-    private static final long serialVersionUID = 1L;
-
-    public OccurrenceThresholdPanicException(String msg) {
-        super(msg);
-    }
-}
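
Both deleted classes follow IndexException into retirement: dedicated exception subtypes give way to coded HyracksDataExceptions. The replacement idiom, sketched; the specific ErrorCode constant below is an assumed name, not taken from this patch:

    import org.apache.hyracks.api.exceptions.ErrorCode;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    final class OccurrencePanicSketch {
        static void checkThreshold(int occurrenceThreshold) throws HyracksDataException {
            if (occurrenceThreshold <= 0) {
                // Hypothetical constant; the real code uses whatever ErrorCode entry
                // replaces OccurrenceThresholdPanicException.
                throw HyracksDataException.create(ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION);
            }
        }
    }
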
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
index fc0e4ec..cc2e7fb 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
@@ -22,9 +22,12 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
@@ -46,8 +49,6 @@
 import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
 import org.apache.hyracks.storage.am.common.impls.AbstractSearchPredicate;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
@@ -89,6 +90,7 @@
 import org.apache.hyracks.storage.common.file.IFileMapProvider;
 
 public class LSMInvertedIndex extends AbstractLSMIndex implements IInvertedIndex {
+    private static final Logger LOGGER = Logger.getLogger(LSMInvertedIndex.class.getName());
 
     protected final IBinaryTokenizerFactory tokenizerFactory;
 
@@ -109,14 +111,13 @@
             OnDiskInvertedIndexFactory diskInvIndexFactory, BTreeFactory deletedKeysBTreeFactory,
             BloomFilterFactory bloomFilterFactory, ILSMComponentFilterFactory filterFactory,
             ILSMComponentFilterFrameFactory filterFrameFactory, LSMComponentFilterManager filterManager,
-            double bloomFilterFalsePositiveRate, ILSMIndexFileManager fileManager,
-            IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
-            ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler,
-            ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields, int[] filterFields,
-            int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps, boolean durable)
-            throws IndexException, HyracksDataException {
+            double bloomFilterFalsePositiveRate, ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider,
+            ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
+            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
+            IBinaryTokenizerFactory tokenizerFactory, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
+            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields,
+            int[] filterFields, int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps,
+            boolean durable) throws HyracksDataException {
         super(ioManager, virtualBufferCaches, diskInvIndexFactory.getBufferCache(), fileManager, diskFileMapProvider,
                 bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler, ioOpCallback, filterFrameFactory,
                 filterManager, filterFields, durable);
@@ -165,27 +166,18 @@
         if (isActivated) {
             throw new HyracksDataException("Failed to activate the index since it is already activated.");
         }
-
-        try {
-            List<ILSMDiskComponent> immutableComponents = diskComponents;
-            immutableComponents.clear();
-            List<LSMComponentFileReferences> validFileReferences = fileManager.cleanupAndGetValidFiles();
-            for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
-                LSMInvertedIndexDiskComponent component;
-                try {
-                    component = createDiskInvIndexComponent(componentFactory,
-                            lsmComonentFileReference.getInsertIndexFileReference(),
-                            lsmComonentFileReference.getDeleteIndexFileReference(),
-                            lsmComonentFileReference.getBloomFilterFileReference(), false);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
-                immutableComponents.add(component);
-            }
-            isActivated = true;
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
+        List<ILSMDiskComponent> immutableComponents = diskComponents;
+        immutableComponents.clear();
+        List<LSMComponentFileReferences> validFileReferences = fileManager.cleanupAndGetValidFiles();
+        for (LSMComponentFileReferences lsmComponentFileReference : validFileReferences) {
+            LSMInvertedIndexDiskComponent component = createDiskInvIndexComponent(componentFactory,
+                    lsmComponentFileReference.getInsertIndexFileReference(),
+                    lsmComponentFileReference.getDeleteIndexFileReference(),
+                    lsmComponentFileReference.getBloomFilterFileReference(), false);
+            immutableComponents.add(component);
         }
+        isActivated = true;
     }
 
     @Override
@@ -215,8 +207,7 @@
         }
         if (flushOnExit) {
             BlockingIOOperationCallbackWrapper cb = new BlockingIOOperationCallbackWrapper(ioOpCallback);
-            ILSMIndexAccessor accessor =
-                    createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+            ILSMIndexAccessor accessor = createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
             accessor.scheduleFlush(cb);
             try {
                 cb.waitForIO();
@@ -322,8 +313,7 @@
      * - Insert key into deleted-keys BTree.
      */
     @Override
-    public void modify(IIndexOperationContext ictx, ITupleReference tuple)
-            throws HyracksDataException, IndexException {
+    public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
         LSMInvertedIndexOpContext ctx = (LSMInvertedIndexOpContext) ictx;
         // TODO: This is a hack to support logging properly in ASTERIX.
         // The proper undo operations are only dependent on the after image so
@@ -355,8 +345,12 @@
                 ctx.keysOnlyTuple.reset(indexTuple);
                 try {
                     ctx.currentDeletedKeysBTreeAccessors.insert(ctx.keysOnlyTuple);
-                } catch (TreeIndexDuplicateKeyException e) {
-                    // Key has already been deleted.
+                } catch (HyracksDataException e) {
+                    if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                        LOGGER.log(Level.WARNING, "Failure during index delete operation", e);
+                        throw e;
+                    }
+                    // Otherwise the key has already been deleted; ignore it.
                 }
                 break;
             }
@@ -373,7 +367,7 @@
 
     @Override
     public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         List<ILSMComponent> operationalComponents = ictx.getComponentHolder();
         int numComponents = operationalComponents.size();
         boolean includeMutableComponent = false;
@@ -387,9 +381,8 @@
                 IIndexAccessor invIndexAccessor = ((LSMInvertedIndexMemoryComponent) component).getInvIndex()
                         .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
                 indexAccessors.add(invIndexAccessor);
-                IIndexAccessor deletedKeysAccessor =
-                        ((LSMInvertedIndexMemoryComponent) component).getDeletedKeysBTree()
-                                .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+                IIndexAccessor deletedKeysAccessor = ((LSMInvertedIndexMemoryComponent) component).getDeletedKeysBTree()
+                        .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
                 deletedKeysBTreeAccessors.add(deletedKeysAccessor);
             } else {
                 IIndexAccessor invIndexAccessor = ((LSMInvertedIndexDiskComponent) component).getInvIndex()
@@ -465,7 +458,7 @@
     }
 
     @Override
-    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
         LSMInvertedIndexFlushOperation flushOp = (LSMInvertedIndexFlushOperation) operation;
 
         // Create an inverted index instance to be bulk loaded.
@@ -553,7 +546,7 @@
 
     @Override
     public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         LSMInvertedIndexOpContext ictx =
                 createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         ictx.setOperation(IndexOperation.MERGE);
@@ -569,8 +562,7 @@
         OnDiskInvertedIndex lastInvIndex = (OnDiskInvertedIndex) lastComponent.getInvIndex();
         String lastFileName = lastInvIndex.getBTree().getFileReference().getFile().getName();
 
-        LSMComponentFileReferences relMergeFileRefs =
-                fileManager.getRelMergeFileReference(firstFileName, lastFileName);
+        LSMComponentFileReferences relMergeFileRefs = fileManager.getRelMergeFileReference(firstFileName, lastFileName);
         ILSMIndexAccessor accessor = new LSMInvertedIndexAccessor(lsmHarness, ctx);
         ioScheduler.scheduleOperation(new LSMInvertedIndexMergeOperation(accessor, mergingComponents, cursor,
                 relMergeFileRefs.getInsertIndexFileReference(), relMergeFileRefs.getDeleteIndexFileReference(),
@@ -578,7 +570,7 @@
     }
 
     @Override
-    public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
         LSMInvertedIndexMergeOperation mergeOp = (LSMInvertedIndexMergeOperation) operation;
         IIndexCursor cursor = mergeOp.getCursor();
 
@@ -670,12 +662,8 @@
 
     @Override
     public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws IndexException {
-        try {
-            return new LSMInvertedIndexBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex);
-        } catch (HyracksDataException e) {
-            throw new IndexException(e);
-        }
+            boolean checkIfEmptyIndex) throws HyracksDataException {
+        return new LSMInvertedIndexBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex);
     }
 
     public class LSMInvertedIndexBulkLoader implements IIndexBulkLoader {
@@ -689,17 +677,13 @@
         public final MultiComparator filterCmp;
 
         public LSMInvertedIndexBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-                boolean checkIfEmptyIndex) throws IndexException, HyracksDataException {
+                boolean checkIfEmptyIndex) throws HyracksDataException {
             if (checkIfEmptyIndex && !isEmptyIndex()) {
-                throw new IndexException("Cannot load an index that is not empty");
+                throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
             }
             // Note that by using a flush target file name, we state that the
             // new bulk loaded tree is "newer" than any other merged tree.
-            try {
-                component = createBulkLoadTarget();
-            } catch (HyracksDataException | IndexException e) {
-                throw new IndexException(e);
-            }
+            component = createBulkLoadTarget();
             invIndexBulkLoader = ((LSMInvertedIndexDiskComponent) component).getInvIndex().createBulkLoader(fillFactor,
                     verifyInput, numElementsHint, false);
 
@@ -719,7 +703,7 @@
         }
 
         @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+        public void add(ITupleReference tuple) throws HyracksDataException {
             try {
                 ITupleReference t;
                 if (indexTuple != null) {
@@ -736,7 +720,7 @@
                     component.getLSMComponentFilter().update(filterTuple, filterCmp);
                 }
 
-            } catch (IndexException | HyracksDataException | RuntimeException e) {
+            } catch (Exception e) {
                 cleanupArtifacts();
                 throw e;
             }
@@ -758,7 +742,7 @@
         }
 
         @Override
-        public void end() throws IndexException, HyracksDataException {
+        public void end() throws HyracksDataException {
             if (!cleanedUpArtifacts) {
                 if (component.getLSMComponentFilter() != null) {
                     filterManager.writeFilter(component.getLSMComponentFilter(),
@@ -788,7 +772,7 @@
             }
         }
 
-        private ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
+        private ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException {
             LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
             return createDiskInvIndexComponent(componentFactory, componentFileRefs.getInsertIndexFileReference(),
                     componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(),
@@ -797,7 +781,7 @@
     }
 
     protected InMemoryInvertedIndex createInMemoryInvertedIndex(IVirtualBufferCache virtualBufferCache,
-            VirtualFreePageManager virtualFreePageManager, int id) throws IndexException, HyracksDataException {
+            VirtualFreePageManager virtualFreePageManager, int id) throws HyracksDataException {
         return InvertedIndexUtils.createInMemoryBTreeInvertedindex(virtualBufferCache, virtualFreePageManager,
                 invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
                 ioManager.resolveAbsolutePath(fileManager.getBaseDir() + "_virtual_vocab_" + id));
@@ -805,7 +789,7 @@
 
     protected LSMInvertedIndexDiskComponent createDiskInvIndexComponent(ILSMDiskComponentFactory factory,
             FileReference dictBTreeFileRef, FileReference btreeFileRef, FileReference bloomFilterFileRef,
-            boolean create) throws HyracksDataException, IndexException {
+            boolean create) throws HyracksDataException {
         LSMInvertedIndexDiskComponent component = (LSMInvertedIndexDiskComponent) factory
                 .createComponent(new LSMComponentFileReferences(dictBTreeFileRef, btreeFileRef, bloomFilterFileRef));
         if (create) {
@@ -843,7 +827,7 @@
 
     @Override
     public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey,
-            IIndexOperationContext ictx) throws HyracksDataException, IndexException {
+            IIndexOperationContext ictx) throws HyracksDataException {
         throw new UnsupportedOperationException("Cannot open inverted list cursor on lsm inverted index.");
     }
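
A minimal standalone sketch of the error-code pattern adopted above, where a single
checked exception carries an integer code and callers branch on getErrorCode()
instead of catching per-condition subclasses such as the removed
TreeIndexDuplicateKeyException. All names here are hypothetical stand-ins, not the
actual Hyracks classes:

    import java.util.HashSet;
    import java.util.Set;

    // One checked exception type with an error code, instead of many subclasses.
    final class DataException extends Exception {
        static final int DUPLICATE_KEY = 33; // illustrative code value
        private final int errorCode;

        DataException(int errorCode, String message) {
            super(message);
            this.errorCode = errorCode;
        }

        int getErrorCode() {
            return errorCode;
        }
    }

    final class ErrorCodeDemo {
        // Pretend insert that rejects duplicates via an error code.
        static void insert(Set<String> keys, String key) throws DataException {
            if (!keys.add(key)) {
                throw new DataException(DataException.DUPLICATE_KEY, "duplicate key: " + key);
            }
        }

        public static void main(String[] args) throws DataException {
            Set<String> keys = new HashSet<>();
            insert(keys, "a");
            try {
                insert(keys, "a");
            } catch (DataException e) {
                if (e.getErrorCode() != DataException.DUPLICATE_KEY) {
                    throw e; // unexpected failure: propagate
                }
                // duplicate keys are expected here; fall through and continue
            }
        }
    }

This mirrors the shape of the new catch block in modify(): rethrow anything that is
not DUPLICATE_KEY, otherwise swallow the expected condition.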
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexAccessor.java
index 604a57c..ec869fa 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexAccessor.java
@@ -25,7 +25,6 @@
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
@@ -48,31 +47,31 @@
     }
 
     @Override
-    public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void insert(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.INSERT);
         lsmHarness.modify(ctx, false, tuple);
     }
 
     @Override
-    public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void delete(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.DELETE);
         lsmHarness.modify(ctx, false, tuple);
     }
 
     @Override
-    public boolean tryInsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public boolean tryInsert(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.INSERT);
         return lsmHarness.modify(ctx, true, tuple);
     }
 
     @Override
-    public boolean tryDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public boolean tryDelete(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.DELETE);
         return lsmHarness.modify(ctx, true, tuple);
     }
 
     @Override
-    public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
+    public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
         ctx.setOperation(IndexOperation.SEARCH);
         lsmHarness.search(ctx, cursor, searchPred);
     }
@@ -89,13 +88,13 @@
     }
 
     @Override
-    public void flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public void flush(ILSMIOOperation operation) throws HyracksDataException {
         lsmHarness.flush(ctx, operation);
     }
 
     @Override
     public void scheduleMerge(ILSMIOOperationCallback callback, List<ILSMDiskComponent> components)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         ctx.setOperation(IndexOperation.MERGE);
         ctx.getComponentsToBeMerged().clear();
         ctx.getComponentsToBeMerged().addAll(components);
@@ -112,13 +111,13 @@
     }
 
     @Override
-    public void scheduleFullMerge(ILSMIOOperationCallback callback) throws HyracksDataException, IndexException {
+    public void scheduleFullMerge(ILSMIOOperationCallback callback) throws HyracksDataException {
         ctx.setOperation(IndexOperation.FULL_MERGE);
         lsmHarness.scheduleFullMerge(ctx, callback);
     }
 
     @Override
-    public void merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public void merge(ILSMIOOperation operation) throws HyracksDataException {
         lsmHarness.merge(ctx, operation);
     }
 
@@ -128,50 +127,49 @@
     }
 
     @Override
-    public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred)
-            throws IndexException, HyracksDataException {
+    public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
         search(cursor, searchPred);
     }
 
     @Override
-    public void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException {
         throw new UnsupportedOperationException("Physical delete not supported by lsm inverted index.");
     }
 
     @Override
-    public void forceInsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void forceInsert(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.INSERT);
         lsmHarness.forceModify(ctx, tuple);
     }
 
     @Override
-    public void forceDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void forceDelete(ITupleReference tuple) throws HyracksDataException {
         ctx.setOperation(IndexOperation.DELETE);
         lsmHarness.forceModify(ctx, tuple);
     }
 
     @Override
-    public void physicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void physicalDelete(ITupleReference tuple) throws HyracksDataException {
         throw new UnsupportedOperationException("Physical delete not supported by lsm inverted index.");
     }
 
     @Override
-    public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void update(ITupleReference tuple) throws HyracksDataException {
         throw new UnsupportedOperationException("Update not supported by lsm inverted index.");
     }
 
     @Override
-    public void upsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void upsert(ITupleReference tuple) throws HyracksDataException {
         throw new UnsupportedOperationException("Upsert not supported by lsm inverted index.");
     }
 
     @Override
-    public boolean tryUpdate(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public boolean tryUpdate(ITupleReference tuple) throws HyracksDataException {
         throw new UnsupportedOperationException("Update not supported by lsm inverted index.");
     }
 
     @Override
-    public boolean tryUpsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public boolean tryUpsert(ITupleReference tuple) throws HyracksDataException {
         throw new UnsupportedOperationException("Upsert not supported by lsm inverted index.");
     }
 
@@ -182,7 +180,7 @@
 
     @Override
     public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         throw new UnsupportedOperationException("Cannot open inverted list cursor on lsm inverted index.");
     }
 
@@ -201,7 +199,7 @@
     }
 
     @Override
-    public void forceUpsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void forceUpsert(ITupleReference tuple) throws HyracksDataException {
         throw new UnsupportedOperationException("Upsert not supported by lsm inverted index.");
     }
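
What the narrowed signatures in this accessor buy call sites, in a hedged sketch
(illustrative names only, not the Hyracks interfaces): with a single checked type,
exceptions simply propagate, where the old dual "throws HyracksDataException,
IndexException" forced callers to catch one type and rewrap it in the other.

    // A single checked exception type...
    final class StorageException extends Exception {
        StorageException(String message) {
            super(message);
        }
    }

    // ...declared by every accessor method, as in the patched interface methods.
    interface TupleAccessor {
        void insert(String tuple) throws StorageException;
    }

    final class BatchInserter {
        // No catch-and-rewrap boilerplate: the one checked type just propagates.
        static void insertAll(TupleAccessor accessor, String... tuples) throws StorageException {
            for (String tuple : tuples) {
                accessor.insert(tuple);
            }
        }
    }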
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexDeletedKeysBTreeMergeCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexDeletedKeysBTreeMergeCursor.java
index 4ebabdb..6a875a0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexDeletedKeysBTreeMergeCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexDeletedKeysBTreeMergeCursor.java
@@ -27,7 +27,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
 import org.apache.hyracks.storage.am.lsm.common.impls.LSMIndexSearchCursor;
@@ -39,14 +38,14 @@
     }
 
     @Override
-    protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException, IndexException {
+    protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException {
         return false;
     }
 
     @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException,
-            IndexException {
-        LSMInvertedIndexRangeSearchCursorInitialState lsmInitialState = (LSMInvertedIndexRangeSearchCursorInitialState) initialState;
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
+        LSMInvertedIndexRangeSearchCursorInitialState lsmInitialState =
+                (LSMInvertedIndexRangeSearchCursorInitialState) initialState;
         cmp = lsmInitialState.getOriginalKeyComparator();
         operationalComponents = lsmInitialState.getOperationalComponents();
         // We intentionally set the lsmHarness to null so that we don't call lsmHarness.endSearch() because we already do that when we merge the inverted indexes.
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
index 97255f6..aeb1e16 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
@@ -30,7 +30,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
 import org.apache.hyracks.storage.am.lsm.common.impls.BTreeFactory;
 import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
@@ -79,8 +78,8 @@
         String baseName = baseDir + ts + SPLIT_STRING + ts;
         // Begin timestamp and end timestamp are identical since it is a flush
         return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + DICT_BTREE_SUFFIX),
-                createFlushFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX), createFlushFile(baseName
-                        + SPLIT_STRING + BLOOM_FILTER_STRING));
+                createFlushFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX),
+                createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
@@ -92,12 +91,12 @@
         String baseName = baseDir + firstTimestampRange[0] + SPLIT_STRING + lastTimestampRange[1];
         // Get the range of timestamps by taking the earliest and the latest timestamps
         return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + DICT_BTREE_SUFFIX),
-                createMergeFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX), createMergeFile(baseName
-                        + SPLIT_STRING + BLOOM_FILTER_STRING));
+                createMergeFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX),
+                createMergeFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
-    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
+    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException {
         List<LSMComponentFileReferences> validFiles = new ArrayList<>();
         ArrayList<ComparableFileName> allDictBTreeFiles = new ArrayList<>();
         ArrayList<ComparableFileName> allInvListsFiles = new ArrayList<>();
@@ -133,8 +132,8 @@
 
         if (allDictBTreeFiles.size() == 1 && allInvListsFiles.size() == 1 && allDeletedKeysBTreeFiles.size() == 1
                 && allBloomFilterFiles.size() == 1) {
-            validFiles.add(new LSMComponentFileReferences(allDictBTreeFiles.get(0).fileRef, allDeletedKeysBTreeFiles
-                    .get(0).fileRef, allBloomFilterFiles.get(0).fileRef));
+            validFiles.add(new LSMComponentFileReferences(allDictBTreeFiles.get(0).fileRef,
+                    allDeletedKeysBTreeFiles.get(0).fileRef, allBloomFilterFiles.get(0).fileRef));
             return validFiles;
         }
 
@@ -184,7 +183,8 @@
                 invalidBloomFilterFile.delete();
             } else {
                 // This scenario should not be possible.
-                throw new HyracksDataException("Found LSM files with overlapping but not contained timetamp intervals.");
+                throw new HyracksDataException(
+                        "Found LSM files with overlapping but not contained timetamp intervals.");
             }
         }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFlushOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFlushOperation.java
index 03ba304..26eed04 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFlushOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFlushOperation.java
@@ -26,7 +26,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -69,7 +68,7 @@
     }
 
     @Override
-    public Boolean call() throws HyracksDataException, IndexException {
+    public Boolean call() throws HyracksDataException {
         accessor.flush(this);
         return true;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMergeOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMergeOperation.java
index c22af32..ffc1e8e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMergeOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMergeOperation.java
@@ -27,7 +27,6 @@
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -80,7 +79,7 @@
     }
 
     @Override
-    public Boolean call() throws HyracksDataException, IndexException {
+    public Boolean call() throws HyracksDataException {
         accessor.merge(this);
         return true;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursor.java
index 358317c..0d37056 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursor.java
@@ -28,7 +28,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
@@ -56,9 +55,9 @@
     }
 
     @Override
-    public void open(ICursorInitialState initState, ISearchPredicate searchPred) throws IndexException,
-            HyracksDataException {
-        LSMInvertedIndexRangeSearchCursorInitialState lsmInitState = (LSMInvertedIndexRangeSearchCursorInitialState) initState;
+    public void open(ICursorInitialState initState, ISearchPredicate searchPred) throws HyracksDataException {
+        LSMInvertedIndexRangeSearchCursorInitialState lsmInitState =
+                (LSMInvertedIndexRangeSearchCursorInitialState) initState;
         cmp = lsmInitState.getOriginalKeyComparator();
         int numComponents = lsmInitState.getNumComponents();
         rangeCursors = new IIndexCursor[numComponents];
@@ -100,7 +99,7 @@
      * Check deleted-keys BTrees whether they contain the key in the checkElement's tuple.
      */
     @Override
-    protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException, IndexException {
+    protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException {
         keysOnlyTuple.reset(checkElement.getTuple());
         int end = checkElement.getCursorIndex();
         for (int i = 0; i < end; i++) {
@@ -110,8 +109,6 @@
                 if (deletedKeysBTreeCursors[i].hasNext()) {
                     return true;
                 }
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
             } finally {
                 deletedKeysBTreeCursors[i].close();
             }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursor.java
index 521d81d..607f957 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursor.java
@@ -29,14 +29,12 @@
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.LSMComponentType;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
 import org.apache.hyracks.storage.am.lsm.common.impls.BloomFilterAwareBTreePointSearchCursor;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
 
 /**
  * Searches the components one-by-one, completely consuming a cursor before moving on to the next one.
@@ -82,8 +80,8 @@
                 // No need for a bloom filter for the in-memory BTree.
                 deletedKeysBTreeCursors[i] = deletedKeysBTreeAccessors.get(i).createSearchCursor(false);
             } else {
-                deletedKeysBTreeCursors[i] = new BloomFilterAwareBTreePointSearchCursor((IBTreeLeafFrame) lsmInitState
-                        .getgetDeletedKeysBTreeLeafFrameFactory().createFrame(), false,
+                deletedKeysBTreeCursors[i] = new BloomFilterAwareBTreePointSearchCursor(
+                        (IBTreeLeafFrame) lsmInitState.getgetDeletedKeysBTreeLeafFrameFactory().createFrame(), false,
                         ((LSMInvertedIndexDiskComponent) operationalComponents.get(i)).getBloomFilter());
             }
         }
@@ -92,7 +90,7 @@
         keySearchPred = new RangePredicate(null, null, true, true, keyCmp, keyCmp);
     }
 
-    protected boolean isDeleted(ITupleReference key) throws HyracksDataException, IndexException {
+    protected boolean isDeleted(ITupleReference key) throws HyracksDataException {
         keySearchPred.setLowKey(key, true);
         keySearchPred.setHighKey(key, true);
         for (int i = 0; i < accessorIndex; i++) {
@@ -102,8 +100,6 @@
                 if (deletedKeysBTreeCursors[i].hasNext()) {
                     return true;
                 }
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
             } finally {
                 deletedKeysBTreeCursors[i].close();
             }
@@ -112,7 +108,7 @@
     }
 
     // Move to the next tuple that has not been deleted.
-    private boolean nextValidTuple() throws HyracksDataException, IndexException {
+    private boolean nextValidTuple() throws HyracksDataException {
         while (currentCursor.hasNext()) {
             currentCursor.next();
             if (!isDeleted(currentCursor.getTuple())) {
@@ -124,7 +120,7 @@
     }
 
     @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
+    public boolean hasNext() throws HyracksDataException {
         if (!tupleConsumed) {
             return true;
         }
@@ -139,13 +135,7 @@
             // Current cursor has been exhausted, switch to next accessor/cursor.
             currentAccessor = indexAccessors.get(accessorIndex);
             currentCursor = currentAccessor.createSearchCursor(false);
-            try {
-                currentAccessor.search(currentCursor, searchPred);
-            } catch (OccurrenceThresholdPanicException e) {
-                throw e;
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            currentAccessor.search(currentCursor, searchPred);
             if (nextValidTuple()) {
                 return true;
             }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
index 0cd19c9..8cd45dc 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
@@ -26,7 +26,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFrameFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -50,25 +49,23 @@
             OnDiskInvertedIndexFactory diskInvIndexFactory, BTreeFactory deletedKeysBTreeFactory,
             BloomFilterFactory bloomFilterFactory, ILSMComponentFilterFactory filterFactory,
             ILSMComponentFilterFrameFactory filterFrameFactory, LSMComponentFilterManager filterManager,
-            double bloomFilterFalsePositiveRate, ILSMIndexFileManager fileManager,
-            IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
-            IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
-            ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler,
-            ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields, int[] filterFields,
-            int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps, boolean durable)
-            throws IndexException, HyracksDataException {
+            double bloomFilterFalsePositiveRate, ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider,
+            ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
+            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
+            IBinaryTokenizerFactory tokenizerFactory, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
+            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields,
+            int[] filterFields, int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps,
+            boolean durable) throws HyracksDataException {
         super(ioManager, virtualBufferCaches, diskInvIndexFactory, deletedKeysBTreeFactory, bloomFilterFactory,
-                filterFactory,
-                filterFrameFactory, filterManager, bloomFilterFalsePositiveRate, fileManager, diskFileMapProvider,
-                invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
-                mergePolicy, opTracker, ioScheduler, ioOpCallback, invertedIndexFields, filterFields,
+                filterFactory, filterFrameFactory, filterManager, bloomFilterFalsePositiveRate, fileManager,
+                diskFileMapProvider, invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories,
+                tokenizerFactory, mergePolicy, opTracker, ioScheduler, ioOpCallback, invertedIndexFields, filterFields,
                 filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, durable);
     }
 
     @Override
     protected InMemoryInvertedIndex createInMemoryInvertedIndex(IVirtualBufferCache virtualBufferCache,
-            VirtualFreePageManager virtualFreePageManager, int id) throws IndexException, HyracksDataException {
+            VirtualFreePageManager virtualFreePageManager, int id) throws HyracksDataException {
         return InvertedIndexUtils.createPartitionedInMemoryBTreeInvertedindex(virtualBufferCache,
                 virtualFreePageManager, invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories,
                 tokenizerFactory, ioManager.resolveAbsolutePath(fileManager.getBaseDir() + "_virtual_vocab_" + id));
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndex.java
index 583c5f4..1b33e79 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndex.java
@@ -20,6 +20,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -33,9 +34,6 @@
 import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 import org.apache.hyracks.storage.am.common.api.IPageManager;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
@@ -112,7 +110,7 @@
     }
 
     public void insert(ITupleReference tuple, BTreeAccessor btreeAccessor, IIndexOperationContext ictx)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         InMemoryInvertedIndexOpContext ctx = (InMemoryInvertedIndexOpContext) ictx;
         ctx.tupleIter.reset(tuple);
         while (ctx.tupleIter.hasNext()) {
@@ -120,16 +118,19 @@
             ITupleReference insertTuple = ctx.tupleIter.getTuple();
             try {
                 btreeAccessor.insert(insertTuple);
-            } catch (TreeIndexDuplicateKeyException e) {
-                // This exception may be caused by duplicate tokens in the same insert "document".
-                // We ignore such duplicate tokens in all inverted-index implementations, hence
-                // we can safely ignore this exception.
+            } catch (HyracksDataException e) {
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
+                // A duplicate key is caused by duplicate tokens in the same
+                // insert "document". All inverted-index implementations ignore
+                // such duplicate tokens, so it is safe to ignore here.
             }
         }
     }
 
     public void delete(ITupleReference tuple, BTreeAccessor btreeAccessor, IIndexOperationContext ictx)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         InMemoryInvertedIndexOpContext ctx = (InMemoryInvertedIndexOpContext) ictx;
         ctx.tupleIter.reset(tuple);
         while (ctx.tupleIter.hasNext()) {
@@ -137,8 +138,11 @@
             ITupleReference deleteTuple = ctx.tupleIter.getTuple();
             try {
                 btreeAccessor.delete(deleteTuple);
-            } catch (TreeIndexNonExistentKeyException e) {
-                // Ignore this exception, since a document may have duplicate tokens.
+            } catch (HyracksDataException e) {
+                if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                    throw e;
+                }
+                // A non-existent key is expected when a document contains
+                // duplicate tokens; ignore it.
             }
         }
     }
@@ -156,7 +160,7 @@
 
     @Override
     public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey,
-            IIndexOperationContext ictx) throws HyracksDataException, IndexException {
+            IIndexOperationContext ictx) throws HyracksDataException {
         InMemoryInvertedIndexOpContext ctx = (InMemoryInvertedIndexOpContext) ictx;
         ctx.setOperation(IndexOperation.SEARCH);
         InMemoryInvertedListCursor inMemListCursor = (InMemoryInvertedListCursor) listCursor;
@@ -192,7 +196,7 @@
 
     @Override
     public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws IndexException {
+            boolean checkIfEmptyIndex) throws HyracksDataException {
         throw new UnsupportedOperationException("Bulk load not supported by in-memory inverted index.");
     }
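
For context on why the catch blocks above tolerate duplicate and non-existent keys,
a self-contained sketch (illustrative code, not the Hyracks tokenizer API): a single
document can contain the same token several times, yet the (token, docId) pair
should be stored, and later deleted, exactly once.

    import java.util.Set;
    import java.util.TreeSet;

    public final class TokenInsertDemo {
        public static void main(String[] args) {
            Set<String> invertedIndex = new TreeSet<>(); // entries are "token|docId"
            String docId = "doc1";
            String document = "to be or not to be";
            int duplicates = 0;
            for (String token : document.split(" ")) {
                // add() returns false for a repeated (token, docId) pair; the real
                // index surfaces the same situation as a DUPLICATE_KEY error code,
                // which the patched insert() deliberately swallows.
                boolean inserted = invertedIndex.add(token + "|" + docId);
                if (!inserted) {
                    duplicates++; // duplicate token within the same document: expected
                }
            }
            System.out.println(invertedIndex);                  // [be|doc1, not|doc1, or|doc1, to|doc1]
            System.out.println("ignored duplicates: " + duplicates); // 2
        }
    }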
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexAccessor.java
index ffe5259..26fa40a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexAccessor.java
@@ -28,7 +28,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexAccessor;
@@ -57,13 +56,13 @@
     }
 
     @Override
-    public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void insert(ITupleReference tuple) throws HyracksDataException {
         opCtx.setOperation(IndexOperation.INSERT);
         index.insert(tuple, btreeAccessor, opCtx);
     }
 
     @Override
-    public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void delete(ITupleReference tuple) throws HyracksDataException {
         opCtx.setOperation(IndexOperation.DELETE);
         index.delete(tuple, btreeAccessor, opCtx);
     }
@@ -74,7 +73,7 @@
     }
 
     @Override
-    public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
+    public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
         searcher.search((OnDiskInvertedIndexSearchCursor) cursor, (InvertedIndexSearchPredicate) searchPred, opCtx);
     }
 
@@ -85,7 +84,7 @@
 
     @Override
     public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         index.openInvertedListCursor(listCursor, searchKey, opCtx);
     }
 
@@ -96,8 +95,7 @@
     }
 
     @Override
-    public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws IndexException,
-            HyracksDataException {
+    public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
         btreeAccessor.search(cursor, searchPred);
     }
 
@@ -106,12 +104,12 @@
     }
 
     @Override
-    public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void update(ITupleReference tuple) throws HyracksDataException {
         throw new UnsupportedOperationException("Update not supported by in-memory inverted index.");
     }
 
     @Override
-    public void upsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void upsert(ITupleReference tuple) throws HyracksDataException {
         throw new UnsupportedOperationException("Upsert not supported by in-memory inverted index.");
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedListCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedListCursor.java
index 9299620..60f8e21 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedListCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedListCursor.java
@@ -28,13 +28,10 @@
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import org.apache.hyracks.dataflow.common.utils.TupleUtils;
 import org.apache.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
 import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.common.tuples.ConcatenatingTupleReference;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
@@ -68,7 +65,7 @@
     }
 
     public void prepare(BTreeAccessor btreeAccessor, RangePredicate btreePred, MultiComparator tokenFieldsCmp,
-            MultiComparator btreeCmp) throws HyracksDataException, IndexException {
+            MultiComparator btreeCmp) throws HyracksDataException {
         // Avoid object creation if this.btreeAccessor == btreeAccessor.
         if (this.btreeAccessor != btreeAccessor) {
             this.btreeAccessor = btreeAccessor;
@@ -87,7 +84,7 @@
         return size() - cursor.size();
     }
 
-    public void reset(ITupleReference tuple) throws HyracksDataException, IndexException {
+    public void reset(ITupleReference tuple) throws HyracksDataException {
         numElements = -1;
         // Copy the tokens tuple for later use in btree probes.
         TupleUtils.copyTuple(tokenTupleBuilder, tuple, tuple.getFieldCount());
@@ -104,7 +101,7 @@
     }
 
     @Override
-    public void pinPages() throws HyracksDataException, IndexException {
+    public void pinPages() throws HyracksDataException {
         btreePred.setLowKeyComparator(tokenFieldsCmp);
         btreePred.setHighKeyComparator(tokenFieldsCmp);
         btreePred.setLowKey(tokenTuple, true);
@@ -122,7 +119,7 @@
     }
 
     @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
+    public boolean hasNext() throws HyracksDataException {
         return btreeCursor.hasNext();
     }
 
@@ -155,8 +152,6 @@
                 }
             } catch (HyracksDataException e) {
                 e.printStackTrace();
-            } catch (IndexException e) {
-                e.printStackTrace();
             } finally {
                 try {
                     countingCursor.close();
@@ -184,8 +179,7 @@
     }
 
     @Override
-    public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) throws HyracksDataException,
-            IndexException {
+    public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) throws HyracksDataException {
         // Close cursor if necessary.
         unpinPages();
         btreeSearchTuple.addTuple(searchTuple);
@@ -195,9 +189,9 @@
         btreePred.setHighKey(btreeSearchTuple, true);
         try {
             btreeAccessor.search(btreeCursor, btreePred);
-        } catch (TreeIndexException e) {
+        } catch (Exception e) {
             btreeSearchTuple.removeLastTuple();
-            throw new HyracksDataException(e);
+            throw HyracksDataException.create(e);
         }
         boolean containsKey = false;
         try {
@@ -212,7 +206,7 @@
 
     @SuppressWarnings("rawtypes")
     @Override
-    public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException, IndexException {
+    public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException {
         StringBuilder strBuilder = new StringBuilder();
         try {
             while (btreeCursor.hasNext()) {
@@ -228,11 +222,7 @@
             btreeCursor.close();
             btreeCursor.reset();
         }
-        try {
-            btreeAccessor.search(btreeCursor, btreePred);
-        } catch (TreeIndexException e) {
-            throw new HyracksDataException(e);
-        }
+        btreeAccessor.search(btreeCursor, btreePred);
         return strBuilder.toString();
     }
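
The containsKey() change above follows a wrap-and-rethrow convention: undo any
partial state, then rethrow through a single create(...) factory. A hedged,
self-contained sketch of that shape (hypothetical names, not the actual
HyracksDataException.create signature):

    import java.util.ArrayDeque;
    import java.util.Deque;

    final class StoreException extends Exception {
        private StoreException(Throwable cause) {
            super(cause);
        }

        // Pass the exception through if it is already ours, wrap otherwise,
        // so causes are not double-wrapped.
        static StoreException create(Exception e) {
            return e instanceof StoreException ? (StoreException) e : new StoreException(e);
        }
    }

    final class SearchDemo {
        private final Deque<String> searchTuples = new ArrayDeque<>();

        boolean containsKey(String key) throws StoreException {
            searchTuples.addLast(key); // state the underlying search relies on
            boolean found;
            try {
                found = doSearch(key);
            } catch (Exception e) {
                searchTuples.removeLast(); // undo the partial state before leaving
                throw StoreException.create(e);
            }
            searchTuples.removeLast();
            return found;
        }

        private boolean doSearch(String key) {
            throw new IllegalStateException("simulated search failure");
        }

        public static void main(String[] args) {
            try {
                new SearchDemo().containsKey("k1");
            } catch (StoreException e) {
                System.out.println("wrapped: " + e.getCause().getMessage());
            }
        }
    }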
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndex.java
index 0bea41f..24d1cdd 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndex.java
@@ -18,7 +18,7 @@
  */
 package org.apache.hyracks.storage.am.lsm.invertedindex.inmemory;
 
-import java.util.ArrayList;
+import java.util.List;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -32,7 +32,6 @@
 import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 import org.apache.hyracks.storage.am.common.api.IPageManager;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
@@ -59,7 +58,7 @@
 
     @Override
     public void insert(ITupleReference tuple, BTreeAccessor btreeAccessor, IIndexOperationContext ictx)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         super.insert(tuple, btreeAccessor, ictx);
         PartitionedInMemoryInvertedIndexOpContext ctx = (PartitionedInMemoryInvertedIndexOpContext) ictx;
         PartitionedInvertedIndexTokenizingTupleIterator tupleIter =
@@ -98,7 +97,7 @@
     @Override
     public boolean openInvertedListPartitionCursors(IInvertedIndexSearcher searcher, IIndexOperationContext ictx,
             short numTokensLowerBound, short numTokensUpperBound, InvertedListPartitions invListPartitions,
-            ArrayList<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException, IndexException {
+            List<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException {
         short minPartitionIndex;
         short maxPartitionIndex;
         partitionIndexLock.readLock().lock();
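
Here and in PartitionedOnDiskInvertedIndex below, ArrayList<IInvertedListCursor> parameters are widened to List<IInvertedListCursor>. A small illustrative sketch of why accepting the interface is preferable (names are made up for the example):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

public class ListParameterSketch {

    // Accepting the List interface (as openInvertedListPartitionCursors now does)
    // lets callers choose any implementation; an ArrayList parameter forced one.
    static void fill(List<String> out) {
        out.addAll(Arrays.asList("token1", "token2"));
    }

    public static void main(String[] args) {
        List<String> a = new ArrayList<>();
        List<String> b = new LinkedList<>(); // now also accepted
        fill(a);
        fill(b);
        System.out.println(a + " " + b);
    }
}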
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndex.java
index 70a5024..1dcd18e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndex.java
@@ -46,9 +46,6 @@
 import org.apache.hyracks.storage.am.common.api.IPageManagerFactory;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.api.UnsortedInputException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
@@ -57,7 +54,6 @@
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.InvertedIndexException;
 import org.apache.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
 import org.apache.hyracks.storage.am.lsm.invertedindex.search.TOccurrenceSearcher;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
@@ -258,7 +254,7 @@
 
     @Override
     public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey,
-            IIndexOperationContext ictx) throws HyracksDataException, IndexException {
+            IIndexOperationContext ictx) throws HyracksDataException {
         OnDiskInvertedIndexOpContext ctx = (OnDiskInvertedIndexOpContext) ictx;
         ctx.btreePred.setLowKeyComparator(ctx.searchCmp);
         ctx.btreePred.setHighKeyComparator(ctx.searchCmp);
@@ -311,7 +307,7 @@
         private IFIFOPageQueue queue;
 
         public OnDiskInvertedIndexBulkLoader(float btreeFillFactor, boolean verifyInput, long numElementsHint,
-                boolean checkIfEmptyIndex, int startPageId) throws IndexException, HyracksDataException {
+                boolean checkIfEmptyIndex, int startPageId) throws HyracksDataException {
             this.verifyInput = verifyInput;
             this.tokenCmp = MultiComparator.create(btree.getComparatorFactories());
             this.invListCmp = MultiComparator.create(invListCmpFactories);
@@ -338,7 +334,7 @@
             currentPage = bufferCache.confiscatePage(BufferedFileHandle.getDiskPageId(fileId, currentPageId));
         }
 
-        private void createAndInsertBTreeTuple() throws IndexException, HyracksDataException {
+        private void createAndInsertBTreeTuple() throws HyracksDataException {
             // Build tuple.
             btreeTupleBuilder.reset();
             DataOutput output = btreeTupleBuilder.getDataOutput();
@@ -375,7 +371,7 @@
          * Key fields of inverted list are fixed size.
          */
         @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+        public void add(ITupleReference tuple) throws HyracksDataException {
             boolean firstElement = lastTupleBuilder.getSize() == 0;
             boolean startNewList = firstElement;
             if (!firstElement) {
@@ -416,7 +412,7 @@
 
             if (verifyInput && lastTupleBuilder.getSize() != 0) {
                 if (allCmp.compare(tuple, lastTuple) <= 0) {
-                    throw new UnsortedInputException(
+                    throw new HyracksDataException(
                             "Input stream given to OnDiskInvertedIndex bulk load is not sorted.");
                 }
             }
@@ -430,7 +426,7 @@
         }
 
         @Override
-        public void end() throws IndexException, HyracksDataException {
+        public void end() throws HyracksDataException {
             // The last tuple builder is empty if add() was never called.
             if (lastTupleBuilder.getSize() != 0) {
                 createAndInsertBTreeTuple();
@@ -505,10 +501,8 @@
         }
 
         @Override
-        public void search(IIndexCursor cursor, ISearchPredicate searchPred)
-                throws HyracksDataException, IndexException {
-            searcher.search((OnDiskInvertedIndexSearchCursor) cursor, (InvertedIndexSearchPredicate) searchPred,
-                    opCtx);
+        public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
+            searcher.search((OnDiskInvertedIndexSearchCursor) cursor, (InvertedIndexSearchPredicate) searchPred, opCtx);
         }
 
         @Override
@@ -518,7 +512,7 @@
 
         @Override
         public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey)
-                throws HyracksDataException, IndexException {
+                throws HyracksDataException {
             index.openInvertedListCursor(listCursor, searchKey, opCtx);
         }
 
@@ -528,29 +522,28 @@
         }
 
         @Override
-        public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred)
-                throws HyracksDataException, IndexException {
+        public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
             OnDiskInvertedIndexRangeSearchCursor rangeSearchCursor = (OnDiskInvertedIndexRangeSearchCursor) cursor;
             rangeSearchCursor.open(null, searchPred);
         }
 
         @Override
-        public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
+        public void insert(ITupleReference tuple) throws HyracksDataException {
             throw new UnsupportedOperationException("Insert not supported by inverted index.");
         }
 
         @Override
-        public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
+        public void update(ITupleReference tuple) throws HyracksDataException {
             throw new UnsupportedOperationException("Update not supported by inverted index.");
         }
 
         @Override
-        public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
+        public void delete(ITupleReference tuple) throws HyracksDataException {
             throw new UnsupportedOperationException("Delete not supported by inverted index.");
         }
 
         @Override
-        public void upsert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+        public void upsert(ITupleReference tuple) throws HyracksDataException {
             throw new UnsupportedOperationException("Upsert not supported by inverted index.");
         }
     }
@@ -603,13 +596,9 @@
 
     @Override
     public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws IndexException {
-        try {
-            return new OnDiskInvertedIndexBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex,
-                    rootPageId);
-        } catch (HyracksDataException e) {
-            throw new InvertedIndexException(e);
-        }
+            boolean checkIfEmptyIndex) throws HyracksDataException {
+        return new OnDiskInvertedIndexBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex,
+                rootPageId);
     }
 
     @Override
@@ -628,8 +617,7 @@
         PermutingTupleReference tokenTuple = new PermutingTupleReference(fieldPermutation);
 
         IInvertedIndexAccessor invIndexAccessor =
-                (IInvertedIndexAccessor) createAccessor(NoOpOperationCallback.INSTANCE,
-                        NoOpOperationCallback.INSTANCE);
+                (IInvertedIndexAccessor) createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         IInvertedListCursor invListCursor = invIndexAccessor.createInvertedListCursor();
         MultiComparator invListCmp = MultiComparator.create(invListCmpFactories);
 
@@ -667,8 +655,6 @@
                     invListCursor.unpinPages();
                 }
             }
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
         } finally {
             btreeCursor.close();
         }
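
The add() hunk above now reports unsorted bulk-load input as a plain HyracksDataException instead of UnsortedInputException. A simplified sketch of that verification step, with strings and a Comparator standing in for tuples and MultiComparator (an illustration of the check, not the real loader):

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class SortedInputCheckSketch {

    // Bulk load assumes a strictly increasing stream: each accepted key is
    // remembered and the next one must compare greater than it.
    static void bulkLoad(List<String> keys, Comparator<String> cmp) throws Exception {
        String last = null;
        for (String key : keys) {
            if (last != null && cmp.compare(key, last) <= 0) {
                throw new Exception("Input stream given to bulk load is not sorted.");
            }
            last = key; // the real loader also appends to inverted-list and BTree pages here
        }
    }

    public static void main(String[] args) throws Exception {
        bulkLoad(Arrays.asList("apple", "banana", "cherry"), Comparator.naturalOrder());
        System.out.println("sorted input accepted");
    }
}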
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexRangeSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexRangeSearchCursor.java
index bd6ce9b..4a74833 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexRangeSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexRangeSearchCursor.java
@@ -27,7 +27,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.tuples.ConcatenatingTupleReference;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
@@ -70,19 +69,15 @@
     }
 
     @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
         this.btreePred = (RangePredicate) searchPred;
-        try {
-            btreeAccessor.search(btreeCursor, btreePred);
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
+        btreeAccessor.search(btreeCursor, btreePred);
         invListCursor.pinPages();
         unpinNeeded = true;
     }
 
     @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
+    public boolean hasNext() throws HyracksDataException {
         if (invListCursor.hasNext()) {
             return true;
         }
@@ -95,11 +90,7 @@
         }
         btreeCursor.next();
         tokenTuple.reset(btreeCursor.getTuple());
-        try {
-            invIndex.openInvertedListCursor(invListCursor, tokenTuple, opCtx);
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
+        invIndex.openInvertedListCursor(invListCursor, tokenTuple, opCtx);
         invListCursor.pinPages();
         invListCursor.hasNext();
         unpinNeeded = true;
@@ -127,7 +118,7 @@
     }
 
     @Override
-    public void reset() throws HyracksDataException, IndexException {
+    public void reset() throws HyracksDataException {
         if (unpinNeeded) {
             invListCursor.unpinPages();
             unpinNeeded = false;
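
hasNext() above drains an inner inverted-list cursor and, when it is exhausted, advances the outer BTree cursor to the next token and opens that token's list. A compact sketch of this two-level iteration with plain Java iterators standing in for the cursors:

import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class NestedCursorSketch {

    public static void main(String[] args) {
        // Token -> inverted list, in token order (the BTree's role here).
        Map<String, List<Integer>> invIndex = new LinkedHashMap<>();
        invIndex.put("tokenA", Arrays.asList(1, 3));
        invIndex.put("tokenB", Arrays.asList(2));

        Iterator<Map.Entry<String, List<Integer>>> tokenCursor = invIndex.entrySet().iterator();
        Iterator<Integer> listCursor = Collections.emptyIterator();
        String token = null;

        // Emulates hasNext()/next(): finish the current list, then move to the next token.
        while (listCursor.hasNext() || tokenCursor.hasNext()) {
            if (!listCursor.hasNext()) {
                Map.Entry<String, List<Integer>> e = tokenCursor.next();
                token = e.getKey();
                listCursor = e.getValue().iterator();
                continue;
            }
            System.out.println(token + " -> element " + listCursor.next());
        }
    }
}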
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndex.java
index a3a4de4..697d217 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndex.java
@@ -19,7 +19,7 @@
 
 package org.apache.hyracks.storage.am.lsm.invertedindex.ondisk;
 
-import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
@@ -32,7 +32,6 @@
 import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 import org.apache.hyracks.storage.am.common.api.IPageManagerFactory;
 import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
@@ -70,7 +69,7 @@
     @Override
     public boolean openInvertedListPartitionCursors(IInvertedIndexSearcher searcher, IIndexOperationContext ictx,
             short numTokensLowerBound, short numTokensUpperBound, InvertedListPartitions invListPartitions,
-            ArrayList<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException, IndexException {
+            List<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException {
         PartitionedTOccurrenceSearcher partSearcher = (PartitionedTOccurrenceSearcher) searcher;
         OnDiskInvertedIndexOpContext ctx = (OnDiskInvertedIndexOpContext) ictx;
         ITupleReference lowSearchKey = null;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/AbstractTOccurrenceSearcher.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/AbstractTOccurrenceSearcher.java
index e4b220b..764d9a5 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/AbstractTOccurrenceSearcher.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/AbstractTOccurrenceSearcher.java
@@ -42,7 +42,6 @@
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IObjectFactory;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
 import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeFrameTupleAccessor;
 import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeTupleReference;
 import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizer;
@@ -52,8 +51,8 @@
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.ObjectCache;
 
 public abstract class AbstractTOccurrenceSearcher implements IInvertedIndexSearcher {
-    protected static final RecordDescriptor QUERY_TOKEN_REC_DESC = new RecordDescriptor(
-            new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+    protected static final RecordDescriptor QUERY_TOKEN_REC_DESC =
+            new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
 
     protected final int OBJECT_CACHE_INIT_SIZE = 10;
     protected final int OBJECT_CACHE_EXPAND_SIZE = 10;
@@ -82,20 +81,20 @@
         this.invIndex = invIndex;
         this.invListCmp = MultiComparator.create(invIndex.getInvListCmpFactories());
         this.invListCursorFactory = new InvertedListCursorFactory(invIndex);
-        this.invListCursorCache = new ObjectCache<IInvertedListCursor>(invListCursorFactory, OBJECT_CACHE_INIT_SIZE,
+        this.invListCursorCache = new ObjectCache<>(invListCursorFactory, OBJECT_CACHE_INIT_SIZE,
                 OBJECT_CACHE_EXPAND_SIZE);
-        this.queryTokenFrame =  new VSizeFrame(ctx);
+        this.queryTokenFrame = new VSizeFrame(ctx);
         this.queryTokenAppender = new FrameTupleAppenderAccessor(QUERY_TOKEN_REC_DESC);
         this.queryTokenAppender.reset(queryTokenFrame, true);
     }
 
+    @Override
     public void reset() {
         searchResult.clear();
         invListMerger.reset();
     }
 
-    protected void tokenizeQuery(InvertedIndexSearchPredicate searchPred) throws HyracksDataException,
-            OccurrenceThresholdPanicException {
+    protected void tokenizeQuery(InvertedIndexSearchPredicate searchPred) throws HyracksDataException {
         ITupleReference queryTuple = searchPred.getQueryTuple();
         int queryFieldIndex = searchPred.getQueryFieldIndex();
         IBinaryTokenizer queryTokenizer = searchPred.getQueryTokenizer();
@@ -144,10 +143,12 @@
         }
     }
 
+    @Override
     public IFrameTupleAccessor createResultFrameTupleAccessor() {
         return new FixedSizeFrameTupleAccessor(ctx.getInitialFrameSize(), searchResult.getTypeTraits());
     }
 
+    @Override
     public ITupleReference createResultFrameTupleReference() {
         return new FixedSizeTupleReference(searchResult.getTypeTraits());
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/InvertedListMerger.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/InvertedListMerger.java
index 5c916f2..55aa159 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/InvertedListMerger.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/InvertedListMerger.java
@@ -27,8 +27,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
@@ -50,7 +48,7 @@
     }
 
     public void merge(ArrayList<IInvertedListCursor> invListCursors, int occurrenceThreshold, int numPrefixLists,
-            SearchResult searchResult) throws HyracksDataException, IndexException {
+            SearchResult searchResult) throws HyracksDataException {
         Collections.sort(invListCursors);
         int numInvLists = invListCursors.size();
         SearchResult result = null;
@@ -88,7 +86,7 @@
 
     protected void mergeSuffixListProbe(IInvertedListCursor invListCursor, SearchResult prevSearchResult,
             SearchResult newSearchResult, int invListIx, int numInvLists, int occurrenceThreshold)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
 
         int prevBufIdx = 0;
         int maxPrevBufIdx = prevSearchResult.getCurrentBufferIndex();
@@ -104,7 +102,8 @@
         while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
 
             resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
-            int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+            int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+                    resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
 
             if (invListCursor.containsKey(resultTuple, invListCmp)) {
                 count++;
@@ -129,7 +128,7 @@
 
     protected void mergeSuffixListScan(IInvertedListCursor invListCursor, SearchResult prevSearchResult,
             SearchResult newSearchResult, int invListIx, int numInvLists, int occurrenceThreshold)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
 
         int prevBufIdx = 0;
         int maxPrevBufIdx = prevSearchResult.getCurrentBufferIndex();
@@ -147,8 +146,9 @@
         int invListTidx = 0;
         int invListNumTuples = invListCursor.size();
 
-        if (invListCursor.hasNext())
+        if (invListCursor.hasNext()) {
             invListCursor.next();
+        }
 
         while (invListTidx < invListNumTuples && resultTidx < resultFrameTupleAcc.getTupleCount()) {
 
@@ -158,7 +158,8 @@
 
             int cmp = invListCmp.compare(invListTuple, resultTuple);
             if (cmp == 0) {
-                int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
+                int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+                        resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
                 newSearchResult.append(resultTuple, count);
                 advanceCursor = true;
                 advancePrevResult = true;
@@ -167,7 +168,8 @@
                     advanceCursor = true;
                     advancePrevResult = false;
                 } else {
-                    int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+                    int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+                            resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
                     if (count + numInvLists - invListIx > occurrenceThreshold) {
                         newSearchResult.append(resultTuple, count);
                     }
@@ -201,7 +203,8 @@
 
             resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
 
-            int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+            int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+                    resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
             if (count + numInvLists - invListIx > occurrenceThreshold) {
                 newSearchResult.append(resultTuple, count);
             }
@@ -219,7 +222,7 @@
     }
 
     protected void mergePrefixList(IInvertedListCursor invListCursor, SearchResult prevSearchResult,
-            SearchResult newSearchResult) throws HyracksDataException, IndexException {
+            SearchResult newSearchResult) throws HyracksDataException {
 
         int prevBufIdx = 0;
         int maxPrevBufIdx = prevSearchResult.getCurrentBufferIndex();
@@ -237,8 +240,9 @@
         int invListTidx = 0;
         int invListNumTuples = invListCursor.size();
 
-        if (invListCursor.hasNext())
+        if (invListCursor.hasNext()) {
             invListCursor.next();
+        }
 
         while (invListTidx < invListNumTuples && resultTidx < resultFrameTupleAcc.getTupleCount()) {
 
@@ -247,7 +251,8 @@
 
             int cmp = invListCmp.compare(invListTuple, resultTuple);
             if (cmp == 0) {
-                int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
+                int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+                        resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
                 newSearchResult.append(resultTuple, count);
                 advanceCursor = true;
                 advancePrevResult = true;
@@ -258,7 +263,8 @@
                     advanceCursor = true;
                     advancePrevResult = false;
                 } else {
-                    int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+                    int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+                            resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
                     newSearchResult.append(resultTuple, count);
                     advanceCursor = false;
                     advancePrevResult = true;
@@ -300,7 +306,8 @@
 
             resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
 
-            int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+            int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+                    resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
             newSearchResult.append(resultTuple, count);
 
             resultTidx++;
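
The merge routines above implement T-occurrence counting over sorted inverted lists, carrying a per-key count between passes and pruning keys that can no longer reach the occurrence threshold. A compact hash-based sketch of the same counting-and-pruning idea (the real code streams over sorted result frames instead of using a map):

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class TOccurrenceSketch {

    static void merge(List<List<Integer>> invLists, int threshold) {
        Map<Integer, Integer> counts = new LinkedHashMap<>();
        int numLists = invLists.size();
        for (int listIx = 0; listIx < numLists; listIx++) {
            for (int key : invLists.get(listIx)) {
                counts.merge(key, 1, Integer::sum);
            }
            // A key with count c after this list can gain at most 'remaining' more,
            // so drop it once c + remaining < threshold; this parallels the
            // "count + numInvLists - invListIx > occurrenceThreshold" test above.
            int remaining = numLists - listIx - 1;
            counts.values().removeIf(c -> c + remaining < threshold);
        }
        System.out.println("keys meeting threshold " + threshold + ": " + counts.keySet());
    }

    public static void main(String[] args) {
        merge(Arrays.asList(
                Arrays.asList(1, 2, 5),
                Arrays.asList(2, 5, 7),
                Arrays.asList(2, 3, 5)), 2);
        // Prints keys 2 and 5, which occur in at least two lists.
    }
}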
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/PartitionedTOccurrenceSearcher.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/PartitionedTOccurrenceSearcher.java
index 7c7e781..9221e1f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/PartitionedTOccurrenceSearcher.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/PartitionedTOccurrenceSearcher.java
@@ -23,19 +23,18 @@
 import java.util.ArrayList;
 
 import org.apache.hyracks.api.context.IHyracksCommonContext;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.primitive.ShortPointable;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.tuples.ConcatenatingTupleReference;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IPartitionedInvertedIndex;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
 import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexSearchCursor;
 
 public class PartitionedTOccurrenceSearcher extends AbstractTOccurrenceSearcher {
@@ -50,7 +49,7 @@
     // Inverted list cursors ordered by token. Used to read relevant inverted-list partitions of one token one after
     // the other for better I/O performance (because the partitions of one inverted list are stored contiguously in a file).
     // The above implies that we currently require holding all inverted lists for a query in memory.
-    protected final ArrayList<IInvertedListCursor> cursorsOrderedByTokens = new ArrayList<IInvertedListCursor>();
+    protected final ArrayList<IInvertedListCursor> cursorsOrderedByTokens = new ArrayList<>();
     protected final InvertedListPartitions partitions = new InvertedListPartitions();
 
     public PartitionedTOccurrenceSearcher(IHyracksCommonContext ctx, IInvertedIndex invIndex)
@@ -87,8 +86,9 @@
         }
     }
 
+    @Override
     public void search(OnDiskInvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred,
-            IIndexOperationContext ictx) throws HyracksDataException, IndexException {
+            IIndexOperationContext ictx) throws HyracksDataException {
         IPartitionedInvertedIndex partInvIndex = (IPartitionedInvertedIndex) invIndex;
         searchResult.reset();
         if (partInvIndex.isEmpty()) {
@@ -104,7 +104,7 @@
 
         occurrenceThreshold = searchModifier.getOccurrenceThreshold(numQueryTokens);
         if (occurrenceThreshold <= 0) {
-            throw new OccurrenceThresholdPanicException("Merge Threshold is <= 0. Failing Search.");
+            throw HyracksDataException.create(ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION);
         }
 
         short maxCountPossible = numQueryTokens;
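
Above, OccurrenceThresholdPanicException gives way to HyracksDataException.create(ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION). A minimal sketch of the code-carrying exception style this migrates to, with an illustrative enum and factory rather than the real Hyracks ErrorCode API:

public class ErrorCodeSketch {

    enum Code { OCCURRENCE_THRESHOLD_PANIC, INTERNAL }

    static class DataException extends Exception {
        final Code code;

        private DataException(Code code) {
            super("error code: " + code);
            this.code = code;
        }

        // One exception class, many machine-checkable conditions.
        static DataException create(Code code) {
            return new DataException(code);
        }
    }

    static void search(int occurrenceThreshold) throws DataException {
        if (occurrenceThreshold <= 0) {
            throw DataException.create(Code.OCCURRENCE_THRESHOLD_PANIC);
        }
        System.out.println("searching with threshold " + occurrenceThreshold);
    }

    public static void main(String[] args) throws DataException {
        search(2);
    }
}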
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
index 39418f3..508a51d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
@@ -22,25 +22,25 @@
 import java.util.ArrayList;
 
 import org.apache.hyracks.api.context.IHyracksCommonContext;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
 import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexSearchCursor;
 
 public class TOccurrenceSearcher extends AbstractTOccurrenceSearcher {
 
-    protected final ArrayList<IInvertedListCursor> invListCursors = new ArrayList<IInvertedListCursor>();
+    protected final ArrayList<IInvertedListCursor> invListCursors = new ArrayList<>();
 
     public TOccurrenceSearcher(IHyracksCommonContext ctx, IInvertedIndex invIndex) throws HyracksDataException {
         super(ctx, invIndex);
     }
 
+    @Override
     public void search(OnDiskInvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred,
-            IIndexOperationContext ictx) throws HyracksDataException, IndexException {
+            IIndexOperationContext ictx) throws HyracksDataException {
         tokenizeQuery(searchPred);
         int numQueryTokens = queryTokenAppender.getTupleCount();
 
@@ -56,7 +56,7 @@
         IInvertedIndexSearchModifier searchModifier = searchPred.getSearchModifier();
         occurrenceThreshold = searchModifier.getOccurrenceThreshold(numQueryTokens);
         if (occurrenceThreshold <= 0) {
-            throw new OccurrenceThresholdPanicException("Merge threshold is <= 0. Failing Search.");
+            throw HyracksDataException.create(ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION);
         }
         int numPrefixLists = searchModifier.getNumPrefixLists(occurrenceThreshold, invListCursors.size());
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
index c6a552c..075cded 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
@@ -34,7 +34,6 @@
 import org.apache.hyracks.storage.am.common.api.IPageManager;
 import org.apache.hyracks.storage.am.common.api.IPageManagerFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -69,8 +68,8 @@
             IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
             IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
             FileReference btreeFileRef) throws HyracksDataException {
-        return new InMemoryInvertedIndex(memBufferCache, virtualFreePageManager, invListTypeTraits,
-                invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory, btreeFileRef);
+        return new InMemoryInvertedIndex(memBufferCache, virtualFreePageManager, invListTypeTraits, invListCmpFactories,
+                tokenTypeTraits, tokenCmpFactories, tokenizerFactory, btreeFileRef);
     }
 
     public static InMemoryInvertedIndex createPartitionedInMemoryBTreeInvertedindex(IBufferCache memBufferCache,
@@ -86,7 +85,7 @@
             IFileMapProvider fileMapProvider, ITypeTraits[] invListTypeTraits,
             IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
             IBinaryComparatorFactory[] tokenCmpFactories, FileReference invListsFile,
-            IPageManagerFactory pageManagerFactory) throws IndexException, HyracksDataException {
+            IPageManagerFactory pageManagerFactory) throws HyracksDataException {
         IInvertedListBuilder builder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
         FileReference btreeFile = getBTreeFile(ioManager, invListsFile);
         return new OnDiskInvertedIndex(bufferCache, fileMapProvider, builder, invListTypeTraits, invListCmpFactories,
@@ -97,7 +96,7 @@
             IBufferCache bufferCache, IFileMapProvider fileMapProvider, ITypeTraits[] invListTypeTraits,
             IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
             IBinaryComparatorFactory[] tokenCmpFactories, FileReference invListsFile,
-            IPageManagerFactory pageManagerFactory) throws IndexException, HyracksDataException {
+            IPageManagerFactory pageManagerFactory) throws HyracksDataException {
         IInvertedListBuilder builder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
         FileReference btreeFile = getBTreeFile(ioManager, invListsFile);
         return new PartitionedOnDiskInvertedIndex(bufferCache, fileMapProvider, builder, invListTypeTraits,
@@ -109,10 +108,9 @@
         return ioManager.resolveAbsolutePath(invListsFile.getFile().getPath() + "_btree");
     }
 
-    public static BTreeFactory createDeletedKeysBTreeFactory(IIOManager ioManager,
-            IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
-            IBinaryComparatorFactory[] invListCmpFactories, IBufferCache diskBufferCache,
-            IPageManagerFactory freePageManagerFactory) throws HyracksDataException {
+    public static BTreeFactory createDeletedKeysBTreeFactory(IIOManager ioManager, IFileMapProvider diskFileMapProvider,
+            ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
+            IBufferCache diskBufferCache, IPageManagerFactory freePageManagerFactory) throws HyracksDataException {
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(invListTypeTraits);
         ITreeIndexFrameFactory leafFrameFactory =
                 BTreeUtils.getLeafFrameFactory(tupleWriterFactory, BTreeLeafFrameType.REGULAR_NSM);
@@ -132,7 +130,7 @@
             ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields,
             ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields,
             int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps, boolean durable,
-            IMetadataPageManagerFactory pageManagerFactory) throws IndexException, HyracksDataException {
+            IMetadataPageManagerFactory pageManagerFactory) throws HyracksDataException {
 
         BTreeFactory deletedKeysBTreeFactory = createDeletedKeysBTreeFactory(ioManager, diskFileMapProvider,
                 invListTypeTraits, invListCmpFactories, diskBufferCache, pageManagerFactory);
@@ -166,9 +164,9 @@
         LSMInvertedIndex invIndex = new LSMInvertedIndex(ioManager, virtualBufferCaches, invIndexFactory,
                 deletedKeysBTreeFactory, bloomFilterFactory, filterFactory, filterFrameFactory, filterManager,
                 bloomFilterFalsePositiveRate, fileManager, diskFileMapProvider, invListTypeTraits, invListCmpFactories,
-                tokenTypeTraits, tokenCmpFactories, tokenizerFactory, mergePolicy, opTracker, ioScheduler,
-                ioOpCallback, invertedIndexFields, filterFields, filterFieldsForNonBulkLoadOps,
-                invertedIndexFieldsForNonBulkLoadOps, durable);
+                tokenTypeTraits, tokenCmpFactories, tokenizerFactory, mergePolicy, opTracker, ioScheduler, ioOpCallback,
+                invertedIndexFields, filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps,
+                durable);
         return invIndex;
     }
 
@@ -181,7 +179,7 @@
             ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields,
             ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields,
             int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps, boolean durable,
-            IPageManagerFactory pageManagerFactory) throws IndexException, HyracksDataException {
+            IPageManagerFactory pageManagerFactory) throws HyracksDataException {
 
         BTreeFactory deletedKeysBTreeFactory = createDeletedKeysBTreeFactory(ioManager, diskFileMapProvider,
                 invListTypeTraits, invListCmpFactories, diskBufferCache, pageManagerFactory);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/ExternalRTreeDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/ExternalRTreeDataflowHelper.java
index 1b2ff6f..ba61d6c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/ExternalRTreeDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/ExternalRTreeDataflowHelper.java
@@ -30,7 +30,6 @@
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -95,16 +94,11 @@
             RTreePolicyType rtreePolicyType, ILinearizeComparatorFactory linearizeCmpFactory, int[] rtreeFields,
             ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields)
             throws HyracksDataException {
-        try {
-            return LSMRTreeUtils.createExternalRTree(ctx.getIOManager(), file, diskBufferCache, diskFileMapProvider,
-                    typeTraits,
-                    rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
-                    bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler,
-                    ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, btreeFields, version, durable,
-                    isPointMBR, (IMetadataPageManagerFactory) opDesc.getPageManagerFactory());
-        } catch (TreeIndexException e) {
-            throw new HyracksDataException(e);
-        }
+        return LSMRTreeUtils.createExternalRTree(ctx.getIOManager(), file, diskBufferCache, diskFileMapProvider,
+                typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
+                bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler,
+                ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, btreeFields, version, durable,
+                isPointMBR, (IMetadataPageManagerFactory) opDesc.getPageManagerFactory());
     }
 
     public int getTargetVersion() {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java
index 63633aa..d580756 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java
@@ -30,7 +30,6 @@
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -84,16 +83,11 @@
             RTreePolicyType rtreePolicyType, ILinearizeComparatorFactory linearizeCmpFactory, int[] rtreeFields,
             ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields)
             throws HyracksDataException {
-        try {
-            return LSMRTreeUtils.createLSMTree(ctx.getIOManager(), virtualBufferCaches, file, diskBufferCache,
-                    diskFileMapProvider,
-                    typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
-                    bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler,
-                    ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, rtreeFields, btreeFields,
-                    filterTypeTraits, filterCmpFactories, filterFields, durable, isPointMBR,
-                    (IMetadataPageManagerFactory) opDesc.getPageManagerFactory());
-        } catch (TreeIndexException e) {
-            throw new HyracksDataException(e);
-        }
+        return LSMRTreeUtils.createLSMTree(ctx.getIOManager(), virtualBufferCaches, file, diskBufferCache,
+                diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+                rtreePolicyType, bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler,
+                ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, rtreeFields, btreeFields,
+                filterTypeTraits, filterCmpFactories, filterFields, durable, isPointMBR,
+                (IMetadataPageManagerFactory) opDesc.getPageManagerFactory());
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java
index 6e0ffaf..81fdbb8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java
@@ -30,7 +30,6 @@
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -66,15 +65,11 @@
             RTreePolicyType rtreePolicyType, ILinearizeComparatorFactory linearizeCmpFactory, int[] rtreeFields,
             ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields)
             throws HyracksDataException {
-        try {
-            return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(ctx.getIOManager(), virtualBufferCaches, file,
-                    diskBufferCache, diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                    valueProviderFactories, rtreePolicyType, mergePolicy, opTracker, ioScheduler, ioOpCallbackFactory
-                            .createIoOpCallback(), linearizeCmpFactory, rtreeFields, filterTypeTraits,
-                    filterCmpFactories, filterFields, durable, isPointMBR, (IMetadataPageManagerFactory) opDesc
-                            .getPageManagerFactory());
-        } catch (TreeIndexException e) {
-            throw new HyracksDataException(e);
-        }
+        return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(ctx.getIOManager(), virtualBufferCaches, file,
+                diskBufferCache, diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories,
+                valueProviderFactories, rtreePolicyType, mergePolicy, opTracker, ioScheduler,
+                ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, rtreeFields, filterTypeTraits,
+                filterCmpFactories, filterFields, durable, isPointMBR,
+                (IMetadataPageManagerFactory) opDesc.getPageManagerFactory());
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
index 480e1e2..646dbef 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
@@ -23,6 +23,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
@@ -36,16 +37,14 @@
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
 import org.apache.hyracks.storage.am.common.impls.AbstractSearchPredicate;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFrameFactory;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
@@ -184,8 +183,7 @@
 
         if (flushOnExit) {
             BlockingIOOperationCallbackWrapper cb = new BlockingIOOperationCallbackWrapper(ioOpCallback);
-            ILSMIndexAccessor accessor =
-                    createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+            ILSMIndexAccessor accessor = createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
             accessor.scheduleFlush(cb);
             try {
                 cb.waitForIO();
@@ -258,7 +256,7 @@
 
     @Override
     public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         LSMRTreeOpContext ctx = (LSMRTreeOpContext) ictx;
         cursor.open(ctx.searchInitialState, pred);
     }
@@ -277,7 +275,7 @@
 
     protected LSMRTreeDiskComponent createDiskComponent(ILSMDiskComponentFactory factory, FileReference insertFileRef,
             FileReference deleteFileRef, FileReference bloomFilterFileRef, boolean createComponent)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         // Create new tree instance.
         LSMRTreeDiskComponent component = (LSMRTreeDiskComponent) factory
                 .createComponent(new LSMComponentFileReferences(insertFileRef, deleteFileRef, bloomFilterFileRef));
@@ -343,8 +341,7 @@
     }
 
     @Override
-    public void modify(IIndexOperationContext ictx, ITupleReference tuple)
-            throws HyracksDataException, IndexException {
+    public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
         LSMRTreeOpContext ctx = (LSMRTreeOpContext) ictx;
         if (ctx.getOperation() == IndexOperation.PHYSICALDELETE) {
             throw new UnsupportedOperationException("Physical delete not supported in the LSM-RTree");
@@ -368,9 +365,12 @@
             // Insert key into the deleted-keys BTree.
             try {
                 ctx.currentMutableBTreeAccessor.insert(indexTuple);
-            } catch (TreeIndexDuplicateKeyException e) {
-                // Do nothing, because one delete tuple is enough to indicate
-                // that all the corresponding insert tuples are deleted
+            } catch (HyracksDataException e) {
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
+                // Otherwise do nothing, because one delete tuple is enough to indicate
+                // that all the corresponding insert tuples are deleted
             }
         }
         if (ctx.filterTuple != null) {
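
With TreeIndexDuplicateKeyException removed, the modify() hunk above inspects getErrorCode() and rethrows everything except DUPLICATE_KEY. A self-contained sketch of this selective-rethrow idiom (all types illustrative):

public class SelectiveRethrowSketch {

    enum Code { DUPLICATE_KEY, OTHER }

    static class DataException extends Exception {
        private final Code code;

        DataException(Code code) {
            this.code = code;
        }

        Code getErrorCode() {
            return code;
        }
    }

    static void insertDeletedKey(boolean alreadyDeleted) throws DataException {
        if (alreadyDeleted) {
            throw new DataException(Code.DUPLICATE_KEY);
        }
    }

    static void delete(boolean alreadyDeleted) throws DataException {
        try {
            insertDeletedKey(alreadyDeleted);
        } catch (DataException e) {
            if (e.getErrorCode() != Code.DUPLICATE_KEY) {
                throw e; // unexpected failure: propagate
            }
            // Duplicate key: one delete tuple already marks the key deleted, so ignore.
        }
    }

    public static void main(String[] args) throws DataException {
        delete(true);  // duplicate swallowed
        delete(false); // clean insert, no exception
        System.out.println("both deletes completed");
    }
}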
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTree.java
index f846c6c..dce7102 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTree.java
@@ -24,6 +24,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -42,12 +43,10 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import org.apache.hyracks.storage.am.common.api.ITwoPCIndexBulkLoader;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -198,21 +197,13 @@
         if (diskComponents.size() == 0 && secondDiskComponents.size() == 0) {
             //First time activation
             List<LSMComponentFileReferences> validFileReferences;
-            try {
-                validFileReferences = fileManager.cleanupAndGetValidFiles();
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            validFileReferences = fileManager.cleanupAndGetValidFiles();
             for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
                 LSMRTreeDiskComponent component;
-                try {
-                    component = createDiskComponent(componentFactory,
-                            lsmComonentFileReference.getInsertIndexFileReference(),
-                            lsmComonentFileReference.getDeleteIndexFileReference(),
-                            lsmComonentFileReference.getBloomFilterFileReference(), false);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
+                component =
+                        createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
+                                lsmComonentFileReference.getDeleteIndexFileReference(),
+                                lsmComonentFileReference.getBloomFilterFileReference(), false);
                 diskComponents.add(component);
                 secondDiskComponents.add(component);
             }
@@ -254,7 +245,7 @@
     // we override this method because this index uses a different opcontext
     @Override
     public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         ExternalRTreeOpContext ctx = (ExternalRTreeOpContext) ictx;
         List<ILSMComponent> operationalComponents = ictx.getComponentHolder();
         ctx.initialState.setOperationalComponents(operationalComponents);
@@ -267,7 +258,7 @@
     // This can be done in a better way by creating a method boolean
     // keepDeletedTuples(mergedComponents);
     @Override
-    public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
         LSMRTreeMergeOperation mergeOp = (LSMRTreeMergeOperation) operation;
         ITreeIndexCursor cursor = mergeOp.getCursor();
         ISearchPredicate rtreeSearchPred = new SearchPredicate(null, null);
@@ -430,8 +421,7 @@
 
     // Not supported
     @Override
-    public void modify(IIndexOperationContext ictx, ITupleReference tuple)
-            throws HyracksDataException, IndexException {
+    public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
         throw new UnsupportedOperationException("tuple modify not supported in LSM-Disk-Only-RTree");
     }
 
@@ -444,7 +434,7 @@
 
     // Not supported
     @Override
-    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
         throw new UnsupportedOperationException("flush not supported in LSM-Disk-Only-RTree");
     }
 
@@ -492,23 +482,15 @@
     // For initial load
     @Override
     public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws TreeIndexException {
-        try {
-            return new LSMTwoPCRTreeBulkLoader(fillLevel, verifyInput, 0, checkIfEmptyIndex, false);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+            boolean checkIfEmptyIndex) throws HyracksDataException {
+        return new LSMTwoPCRTreeBulkLoader(fillLevel, verifyInput, 0, checkIfEmptyIndex, false);
     }
 
     // For transaction bulk load (could be consolidated with the method above)
     @Override
     public IIndexBulkLoader createTransactionBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws TreeIndexException {
-        try {
-            return new LSMTwoPCRTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, true);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+            boolean checkIfEmptyIndex) throws HyracksDataException {
+        return new LSMTwoPCRTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, true);
     }
 
     // The bulk loader used for both initial loading and transaction
@@ -524,24 +506,16 @@
         private final boolean isTransaction;
 
         public LSMTwoPCRTreeBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-                boolean checkIfEmptyIndex, boolean isTransaction) throws TreeIndexException, HyracksDataException {
+                boolean checkIfEmptyIndex, boolean isTransaction) throws HyracksDataException {
             this.isTransaction = isTransaction;
             // Create the appropriate target
             if (isTransaction) {
-                try {
-                    component = createTransactionTarget();
-                } catch (HyracksDataException | IndexException e) {
-                    throw new TreeIndexException(e);
-                }
+                component = createTransactionTarget();
             } else {
                 if (checkIfEmptyIndex && !isEmptyIndex()) {
-                    throw new TreeIndexException("Cannot load an index that is not empty");
+                    throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
                 }
-                try {
-                    component = createBulkLoadTarget();
-                } catch (HyracksDataException | IndexException e) {
-                    throw new TreeIndexException(e);
-                }
+                component = createBulkLoadTarget();
             }
 
             // Create the three loaders
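
Editorial note: the `LOAD_NON_EMPTY_INDEX` line above shows the replacement for free-form exception messages: `HyracksDataException.create(ErrorCode...)` mints an exception carrying a machine-checkable code instead of a message-only `TreeIndexException`. A hedged sketch of raising such a coded error; the guard mirrors the constructor above, and `isEmpty` is an illustrative stand-in for `isEmptyIndex()`:

```java
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;

class LoadGuardSketch {
    // Refuse to bulk load a non-empty index, reporting the condition as a
    // coded HyracksDataException.
    static void checkLoadable(boolean checkIfEmptyIndex, boolean isEmpty) throws HyracksDataException {
        if (checkIfEmptyIndex && !isEmpty) {
            throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
        }
    }
}
```

Callers can then branch on `getErrorCode()` instead of on exception subtypes, as the `DUPLICATE_KEY` hunk in LSMRTree.java below demonstrates.
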
@@ -557,10 +531,10 @@
         }
 
         @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+        public void add(ITupleReference tuple) throws HyracksDataException {
             try {
                 rtreeBulkLoader.add(tuple);
-            } catch (IndexException | HyracksDataException | RuntimeException e) {
+            } catch (Exception e) {
                 cleanupArtifacts();
                 throw e;
             }
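
Editorial note: broadening `catch (IndexException | HyracksDataException | RuntimeException e)` to `catch (Exception e)` still compiles against the narrowed `throws HyracksDataException` clause because of Java 7's precise rethrow: when the catch parameter is effectively final and rethrown unchanged, the compiler tracks the checked types the `try` body can actually throw. A self-contained demonstration with hypothetical names:

```java
class PreciseRethrowDemo {
    static class DataException extends Exception {
    }

    static void add(boolean fail) throws DataException {
        if (fail) {
            throw new DataException();
        }
    }

    // Legal with `throws DataException` even though the catch type is the
    // broader Exception: the try body can only throw DataException or an
    // unchecked exception, and `e` is rethrown without reassignment.
    static void addWithCleanup(boolean fail) throws DataException {
        try {
            add(fail);
        } catch (Exception e) {
            cleanupArtifacts(); // stand-in for the bulk loader's real cleanup
            throw e;
        }
    }

    static void cleanupArtifacts() {
    }
}
```
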
@@ -596,7 +570,7 @@
         }
 
         @Override
-        public void end() throws HyracksDataException, IndexException {
+        public void end() throws HyracksDataException {
             if (!cleanedUpArtifacts) {
                 if (!endedBloomFilterLoad) {
                     builder.end();
@@ -623,11 +597,11 @@
         }
 
         @Override
-        public void delete(ITupleReference tuple) throws IndexException, HyracksDataException {
+        public void delete(ITupleReference tuple) throws HyracksDataException {
             try {
                 btreeBulkLoader.add(tuple);
                 builder.add(tuple);
-            } catch (IndexException | HyracksDataException | RuntimeException e) {
+            } catch (Exception e) {
                 cleanupArtifacts();
                 throw e;
             }
@@ -647,12 +621,12 @@
 
         // This method is used to create a target for a bulk modify operation. This
         // component must then eventually be either committed or deleted
-        private ILSMDiskComponent createTransactionTarget() throws HyracksDataException, IndexException {
+        private ILSMDiskComponent createTransactionTarget() throws HyracksDataException {
             LSMComponentFileReferences componentFileRefs;
             try {
                 componentFileRefs = fileManager.getNewTransactionFileReference();
             } catch (IOException e) {
-                throw new HyracksDataException("Failed to create transaction components", e);
+                throw HyracksDataException.create(e);
             }
             return createDiskComponent(componentFactory, componentFileRefs.getInsertIndexFileReference(),
                     componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(),
@@ -669,7 +643,7 @@
     // opCtx. first line <- in schedule merge, we->
     @Override
     public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         ILSMIndexOperationContext rctx = createOpContext(NoOpOperationCallback.INSTANCE, -1);
         rctx.setOperation(IndexOperation.MERGE);
         List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
@@ -732,7 +706,7 @@
     }
 
     @Override
-    public void commitTransaction() throws TreeIndexException, HyracksDataException, IndexException {
+    public void commitTransaction() throws HyracksDataException {
         LSMComponentFileReferences componentFileRefrences = fileManager.getTransactionFileReferenceForCommit();
         LSMRTreeDiskComponent component = null;
         if (componentFileRefrences != null) {
@@ -744,21 +718,13 @@
     }
 
     @Override
-    public void abortTransaction() throws TreeIndexException {
-        try {
-            fileManager.deleteTransactionFiles();
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+    public void abortTransaction() throws HyracksDataException {
+        fileManager.deleteTransactionFiles();
     }
 
     @Override
-    public void recoverTransaction() throws TreeIndexException {
-        try {
-            fileManager.recoverTransaction();
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+    public void recoverTransaction() throws HyracksDataException {
+        fileManager.recoverTransaction();
     }
 
     @Override
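
Editorial note: taken together, `createTransactionBulkLoader`, `commitTransaction`, `abortTransaction`, and `recoverTransaction` now form a two-phase bulk-load API with a single checked exception; inside it, `HyracksDataException.create(e)` replaces the hand-written "Failed to create transaction components" wrap while preserving the cause. A hedged usage sketch of that lifecycle (the tuple source, load parameters, and the import path for `IIndexBulkLoader` are assumptions, not taken from a real test):

```java
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader;
import org.apache.hyracks.storage.am.lsm.rtree.impls.ExternalRTree;

class TwoPhaseLoadSketch {
    // Load a batch into a transaction component, then either publish it via
    // commitTransaction() or roll it back via abortTransaction().
    static void loadTransactionally(ExternalRTree index, Iterable<ITupleReference> tuples, long expectedCount)
            throws HyracksDataException {
        IIndexBulkLoader loader = index.createTransactionBulkLoader(0.7f, true, expectedCount, false);
        try {
            for (ITupleReference tuple : tuples) {
                loader.add(tuple);
            }
            loader.end();
            index.commitTransaction(); // publish the transaction component
        } catch (HyracksDataException e) {
            index.abortTransaction(); // delete the transaction files instead
            throw e;
        }
    }
}
```
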
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
index e62ccdc..497a887 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
@@ -26,6 +26,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
@@ -46,18 +47,15 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.common.tuples.DualTupleReference;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFrameFactory;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -96,8 +94,7 @@
             throws HyracksDataException {
         super(ioManager, virtualBufferCaches, rtreeInteriorFrameFactory, rtreeLeafFrameFactory,
                 btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
-                new LSMRTreeDiskComponentFactory(diskRTreeFactory, diskBTreeFactory, bloomFilterFactory,
-                        filterFactory),
+                new LSMRTreeDiskComponentFactory(diskRTreeFactory, diskBTreeFactory, bloomFilterFactory, filterFactory),
                 diskFileMapProvider, fieldCount, rtreeCmpFactories, btreeCmpFactories, linearizer, comparatorFields,
                 linearizerArray, bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler, ioOpCallback,
                 filterFactory, filterFrameFactory, filterManager, rtreeFields, filterFields, durable, isPointMBR,
@@ -138,22 +135,13 @@
         super.activate();
         List<ILSMDiskComponent> immutableComponents = diskComponents;
         List<LSMComponentFileReferences> validFileReferences;
-        try {
-            validFileReferences = fileManager.cleanupAndGetValidFiles();
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
+        validFileReferences = fileManager.cleanupAndGetValidFiles();
         immutableComponents.clear();
         for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
             LSMRTreeDiskComponent component;
-            try {
-                component =
-                        createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
-                                lsmComonentFileReference.getDeleteIndexFileReference(),
-                                lsmComonentFileReference.getBloomFilterFileReference(), false);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            component = createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
+                    lsmComonentFileReference.getDeleteIndexFileReference(),
+                    lsmComonentFileReference.getBloomFilterFileReference(), false);
             immutableComponents.add(component);
         }
         isActivated = true;
@@ -225,7 +213,7 @@
     }
 
     @Override
-    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
         LSMRTreeFlushOperation flushOp = (LSMRTreeFlushOperation) operation;
         LSMRTreeMemoryComponent flushingComponent = (LSMRTreeMemoryComponent) flushOp.getFlushingComponent();
         // Renaming order is critical because we assume ordering when we
@@ -337,7 +325,7 @@
 
     @Override
     public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         ILSMIndexOperationContext rctx =
                 createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         rctx.setOperation(IndexOperation.MERGE);
@@ -351,7 +339,7 @@
     }
 
     @Override
-    public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
         LSMRTreeMergeOperation mergeOp = (LSMRTreeMergeOperation) operation;
         ITreeIndexCursor cursor = mergeOp.getCursor();
         ISearchPredicate rtreeSearchPred = new SearchPredicate(null, null);
@@ -444,21 +432,21 @@
         }
 
         @Override
-        public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
+        public void delete(ITupleReference tuple) throws HyracksDataException {
             ctx.setOperation(IndexOperation.DELETE);
             dualTuple.reset(tuple);
             lsmHarness.modify(ctx, false, dualTuple);
         }
 
         @Override
-        public boolean tryDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+        public boolean tryDelete(ITupleReference tuple) throws HyracksDataException {
             ctx.setOperation(IndexOperation.DELETE);
             dualTuple.reset(tuple);
             return lsmHarness.modify(ctx, true, dualTuple);
         }
 
         @Override
-        public void forceDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+        public void forceDelete(ITupleReference tuple) throws HyracksDataException {
             ctx.setOperation(IndexOperation.DELETE);
             dualTuple.reset(tuple);
             lsmHarness.forceModify(ctx, dualTuple);
@@ -470,27 +458,21 @@
         }
     }
 
-    protected ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
+    protected ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException {
         LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
         return createDiskComponent(componentFactory, componentFileRefs.getInsertIndexFileReference(),
-                componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(),
-                true);
+                componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(), true);
     }
 
     @Override
     public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws TreeIndexException {
-        try {
-            return new LSMRTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+            boolean checkIfEmptyIndex) throws HyracksDataException {
+        return new LSMRTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex);
     }
 
     // This function is modified for R-Trees without antimatter tuples to allow buddy B-Tree to have only primary keys
     @Override
-    public void modify(IIndexOperationContext ictx, ITupleReference tuple)
-            throws HyracksDataException, IndexException {
+    public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
         LSMRTreeOpContext ctx = (LSMRTreeOpContext) ictx;
         if (ctx.getOperation() == IndexOperation.PHYSICALDELETE) {
             throw new UnsupportedOperationException("Physical delete not supported in the LSM-RTree");
@@ -513,9 +495,12 @@
             ctx.currentMutableRTreeAccessor.delete(indexTuple);
             try {
                 ctx.currentMutableBTreeAccessor.insert(((DualTupleReference) tuple).getPermutingTuple());
-            } catch (TreeIndexDuplicateKeyException e) {
+            } catch (HyracksDataException e) {
                 // Do nothing, because one delete tuple is enough to indicate
                 // that all the corresponding insert tuples are deleted
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
         }
         if (ctx.filterTuple != null) {
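
Editorial note: with `TreeIndexDuplicateKeyException` gone, the duplicate-key special case above moves from the type system to an error-code check. The rethrow guard is the important part: a bare swallow would now hide unrelated failures too. A hedged sketch of the pattern, where `Inserter` is a hypothetical stand-in for the buddy-BTree accessor:

```java
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;

class DuplicateKeySketch {
    interface Inserter {
        void insert() throws HyracksDataException; // stand-in for ctx.currentMutableBTreeAccessor.insert(...)
    }

    // Inserting a delete-key twice is benign: one key is enough to mark the
    // tuple deleted. Any other coded failure must still propagate.
    static void insertDeleteKey(Inserter inserter) throws HyracksDataException {
        try {
            inserter.insert();
        } catch (HyracksDataException e) {
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                throw e;
            }
            // duplicate key: nothing to do
        }
    }
}
```
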
@@ -536,17 +521,13 @@
         public final MultiComparator filterCmp;
 
         public LSMRTreeBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-                boolean checkIfEmptyIndex) throws TreeIndexException, HyracksDataException {
+                boolean checkIfEmptyIndex) throws HyracksDataException {
             if (checkIfEmptyIndex && !isEmptyIndex()) {
-                throw new TreeIndexException("Cannot load an index that is not empty");
+                throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
             }
             // Note that by using a flush target file name, we state that the
             // new bulk loaded tree is "newer" than any other merged tree.
-            try {
-                component = createBulkLoadTarget();
-            } catch (HyracksDataException | IndexException e) {
-                throw new TreeIndexException(e);
-            }
+            component = createBulkLoadTarget();
             bulkLoader = ((LSMRTreeDiskComponent) component).getRTree().createBulkLoader(fillFactor, verifyInput,
                     numElementsHint, false);
             buddyBTreeBulkloader = ((LSMRTreeDiskComponent) component).getBTree().createBulkLoader(fillFactor,
@@ -563,7 +544,7 @@
         }
 
         @Override
-        public void add(ITupleReference tuple) throws HyracksDataException, IndexException {
+        public void add(ITupleReference tuple) throws HyracksDataException {
             try {
                 ITupleReference t;
                 if (indexTuple != null) {
@@ -579,7 +560,7 @@
                     filterTuple.reset(tuple);
                     component.getLSMComponentFilter().update(filterTuple, filterCmp);
                 }
-            } catch (IndexException | HyracksDataException | RuntimeException e) {
+            } catch (Exception e) {
                 cleanupArtifacts();
                 throw e;
             }
@@ -589,7 +570,7 @@
         }
 
         @Override
-        public void end() throws HyracksDataException, IndexException {
+        public void end() throws HyracksDataException {
             if (!cleanedUpArtifacts) {
 
                 if (component.getLSMComponentFilter() != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeDeletedKeysBTreeMergeCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeDeletedKeysBTreeMergeCursor.java
index 22a1054..409649b 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeDeletedKeysBTreeMergeCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeDeletedKeysBTreeMergeCursor.java
@@ -29,7 +29,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
@@ -42,13 +41,12 @@
     }
 
     @Override
-    protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException, IndexException {
+    protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException {
         return false;
     }
 
     @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException,
-            IndexException {
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
         LSMRTreeCursorInitialState lsmInitialState = (LSMRTreeCursorInitialState) initialState;
         cmp = lsmInitialState.getBTreeCmp();
         operationalComponents = lsmInitialState.getOperationalComponents();
@@ -63,7 +61,7 @@
             ILSMComponent component = operationalComponents.get(i);
             IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) lsmInitialState.getBTreeLeafFrameFactory().createFrame();
             rangeCursors[i] = new BTreeRangeSearchCursor(leafFrame, false);
-            BTree btree = (BTree) ((LSMRTreeDiskComponent) component).getBTree();
+            BTree btree = ((LSMRTreeDiskComponent) component).getBTree();
             btreeAccessors[i] = btree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
             btreeAccessors[i].search(rangeCursors[i], btreePredicate);
         }
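
Editorial note: the dropped `(BTree)` cast above (and the matching casts removed in the anti-matter cursor further below) is possible because the component getters now declare their concrete tree types, i.e. covariant return types. A self-contained illustration with hypothetical names:

```java
class CovariantReturnSketch {
    static class Index {
    }

    static class BTreeLike extends Index {
    }

    interface Component {
        Index getTree();
    }

    static class DiskComponent implements Component {
        @Override
        public BTreeLike getTree() { // covariant override: narrower return type
            return new BTreeLike();
        }
    }

    static BTreeLike use(DiskComponent component) {
        return component.getTree(); // no (BTreeLike) cast needed anymore
    }
}
```
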
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
index cb6c065..f9ee5c9 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
@@ -34,7 +34,6 @@
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
 import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
 import org.apache.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
@@ -74,8 +73,8 @@
         String baseName = baseDir + ts + SPLIT_STRING + ts;
         // Begin timestamp and end timestamp are identical since it is a flush
         return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + RTREE_STRING),
-                createFlushFile(baseName + SPLIT_STRING + BTREE_STRING), createFlushFile(baseName + SPLIT_STRING
-                        + BLOOM_FILTER_STRING));
+                createFlushFile(baseName + SPLIT_STRING + BTREE_STRING),
+                createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
@@ -88,12 +87,12 @@
         // Get the range of timestamps by taking the earliest and the latest
         // timestamps
         return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + RTREE_STRING),
-                createMergeFile(baseName + SPLIT_STRING + BTREE_STRING), createMergeFile(baseName + SPLIT_STRING
-                        + BLOOM_FILTER_STRING));
+                createMergeFile(baseName + SPLIT_STRING + BTREE_STRING),
+                createMergeFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
-    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
+    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException {
         List<LSMComponentFileReferences> validFiles = new ArrayList<>();
         ArrayList<ComparableFileName> allRTreeFiles = new ArrayList<>();
         ArrayList<ComparableFileName> allBTreeFiles = new ArrayList<>();
@@ -110,7 +109,8 @@
             btreeFilesSet.add(cmpFileName.fileName.substring(0, index));
         }
         validateFiles(btreeFilesSet, allRTreeFiles, getCompoundFilter(transactionFilter, rtreeFilter), rtreeFactory);
-        validateFiles(btreeFilesSet, allBloomFilterFiles, getCompoundFilter(transactionFilter, bloomFilterFilter), null);
+        validateFiles(btreeFilesSet, allBloomFilterFiles, getCompoundFilter(transactionFilter, bloomFilterFilter),
+                null);
 
         // Sanity check.
         if (allRTreeFiles.size() != allBTreeFiles.size() || allBTreeFiles.size() != allBloomFilterFiles.size()) {
@@ -175,7 +175,8 @@
                 invalidBloomFilterFile.delete();
             } else {
                 // This scenario should not be possible.
-                throw new HyracksDataException("Found LSM files with overlapping but not contained timetamp intervals.");
+                throw new HyracksDataException(
+                        "Found LSM files with overlapping but not contained timetamp intervals.");
             }
         }
 
@@ -206,8 +207,8 @@
 
         String baseName = baseDir + ts + SPLIT_STRING + ts;
         return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + RTREE_STRING),
-                createFlushFile(baseName + SPLIT_STRING + BTREE_STRING), createFlushFile(baseName + SPLIT_STRING
-                        + BLOOM_FILTER_STRING));
+                createFlushFile(baseName + SPLIT_STRING + BTREE_STRING),
+                createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFlushOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFlushOperation.java
index 013821d..618b3a7 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFlushOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFlushOperation.java
@@ -25,7 +25,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -70,7 +69,7 @@
     }
 
     @Override
-    public Boolean call() throws HyracksDataException, IndexException {
+    public Boolean call() throws HyracksDataException {
         accessor.flush(this);
         return true;
     }
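
Editorial note: `call()` is the `Callable`-style entry point the LSM IO scheduler executes, so narrowing its throws clause means the scheduler side now has a single checked type to unwrap. A self-contained sketch of running such an operation on a plain `ExecutorService` (not the real `ILSMIOOperationScheduler`; the operation body is a stand-in for `accessor.flush(this)` / `accessor.merge(this)`):

```java
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

class IoOperationSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        try {
            Callable<Boolean> operation = () -> Boolean.TRUE; // stand-in flush/merge
            Future<Boolean> done = executor.submit(operation);
            try {
                System.out.println("operation completed: " + done.get());
            } catch (ExecutionException e) {
                // one checked type to unwrap now, instead of two
                throw new Exception("IO operation failed", e.getCause());
            }
        } finally {
            executor.shutdown();
        }
    }
}
```
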
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMergeOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMergeOperation.java
index 6a12ab2..847533f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMergeOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMergeOperation.java
@@ -26,7 +26,6 @@
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -82,7 +81,7 @@
     }
 
     @Override
-    public Boolean call() throws HyracksDataException, IndexException {
+    public Boolean call() throws HyracksDataException {
         accessor.merge(this);
         return true;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java
index a11f742..08458d4 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java
@@ -23,7 +23,6 @@
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
 
@@ -67,16 +66,12 @@
     private void searchNextCursor() throws HyracksDataException {
         if (currentCursor < numberOfTrees) {
             rtreeCursors[currentCursor].reset();
-            try {
-                rtreeAccessors[currentCursor].search(rtreeCursors[currentCursor], rtreeSearchPredicate);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            rtreeAccessors[currentCursor].search(rtreeCursors[currentCursor], rtreeSearchPredicate);
         }
     }
 
     @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
+    public boolean hasNext() throws HyracksDataException {
         if (foundNext) {
             return true;
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSortedCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSortedCursor.java
index d9da016..c536712 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSortedCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSortedCursor.java
@@ -24,7 +24,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
 
@@ -37,8 +36,8 @@
     private int foundIn = -1;
     private PermutingTupleReference btreeTuple;
 
-    public LSMRTreeSortedCursor(ILSMIndexOperationContext opCtx, ILinearizeComparatorFactory linearizer, int[] buddyBTreeFields)
-            throws HyracksDataException {
+    public LSMRTreeSortedCursor(ILSMIndexOperationContext opCtx, ILinearizeComparatorFactory linearizer,
+            int[] buddyBTreeFields) throws HyracksDataException {
         super(opCtx);
         this.linearizeCmp = linearizer.createBinaryComparator();
         this.btreeTuple = new PermutingTupleReference(buddyBTreeFields);
@@ -56,11 +55,7 @@
         try {
             for (int i = 0; i < numberOfTrees; i++) {
                 rtreeCursors[i].reset();
-                try {
-                    rtreeAccessors[i].search(rtreeCursors[i], rtreeSearchPredicate);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
+                rtreeAccessors[i].search(rtreeCursors[i], rtreeSearchPredicate);
                 if (rtreeCursors[i].hasNext()) {
                     rtreeCursors[i].next();
                 } else {
@@ -75,7 +70,7 @@
     }
 
     @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
+    public boolean hasNext() throws HyracksDataException {
         while (!foundNext) {
             frameTuple = null;
 
@@ -89,8 +84,9 @@
 
             foundIn = -1;
             for (int i = 0; i < numberOfTrees; i++) {
-                if (depletedRtreeCursors[i])
+                if (depletedRtreeCursors[i]) {
                     continue;
+                }
 
                 if (frameTuple == null) {
                     frameTuple = rtreeCursors[i].getTuple();
@@ -99,28 +95,25 @@
                 }
 
                 if (linearizeCmp.compare(frameTuple.getFieldData(0), frameTuple.getFieldStart(0),
-                        frameTuple.getFieldLength(0) * linearizeCmp.getDimensions(), rtreeCursors[i].getTuple()
-                                .getFieldData(0), rtreeCursors[i].getTuple().getFieldStart(0), rtreeCursors[i]
-                                .getTuple().getFieldLength(0) * linearizeCmp.getDimensions()) > 0) {
+                        frameTuple.getFieldLength(0) * linearizeCmp.getDimensions(),
+                        rtreeCursors[i].getTuple().getFieldData(0), rtreeCursors[i].getTuple().getFieldStart(0),
+                        rtreeCursors[i].getTuple().getFieldLength(0) * linearizeCmp.getDimensions()) > 0) {
                     frameTuple = rtreeCursors[i].getTuple();
                     foundIn = i;
                 }
             }
 
-            if (foundIn == -1)
+            if (foundIn == -1) {
                 return false;
+            }
 
             boolean killed = false;
             btreeTuple.reset(frameTuple);
             for (int i = 0; i < foundIn; i++) {
-                try {
-                    btreeCursors[i].reset();
-                    btreeRangePredicate.setHighKey(btreeTuple, true);
-                    btreeRangePredicate.setLowKey(btreeTuple, true);
-                    btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
-                } catch (IndexException e) {
-                    throw new HyracksDataException(e);
-                }
+                btreeCursors[i].reset();
+                btreeRangePredicate.setHighKey(btreeTuple, true);
+                btreeRangePredicate.setLowKey(btreeTuple, true);
+                btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
                 try {
                     if (btreeCursors[i].hasNext()) {
                         killed = true;
@@ -151,11 +144,7 @@
         foundNext = false;
         for (int i = 0; i < numberOfTrees; i++) {
             rtreeCursors[i].reset();
-            try {
-                rtreeAccessors[i].search(rtreeCursors[i], rtreeSearchPredicate);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            rtreeAccessors[i].search(rtreeCursors[i], rtreeSearchPredicate);
             if (rtreeCursors[i].hasNext()) {
                 rtreeCursors[i].next();
             } else {
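
Editorial note: after this change every cursor method on this path (`open`, `hasNext`, `next`, `reset`) surfaces failures as `HyracksDataException` alone. A hedged consumer-side sketch, assuming the accessor, cursor, and predicate are set up as in the `open()` methods above:

```java
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;

class CursorDrainSketch {
    // Drain a cursor over the given predicate and count the matches.
    static int drain(ITreeIndexAccessor accessor, ITreeIndexCursor cursor, ISearchPredicate predicate)
            throws HyracksDataException {
        int count = 0;
        accessor.search(cursor, predicate);
        try {
            while (cursor.hasNext()) { // a single checked exception after this commit
                cursor.next();
                ITupleReference tuple = cursor.getTuple();
                count++; // consume `tuple` here
            }
        } finally {
            cursor.close();
        }
        return count;
    }
}
```
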
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java
index 179865a..b246e8f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java
@@ -26,6 +26,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -38,17 +39,15 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFrameFactory;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -104,19 +103,11 @@
         List<ILSMDiskComponent> immutableComponents = diskComponents;
         immutableComponents.clear();
         List<LSMComponentFileReferences> validFileReferences;
-        try {
-            validFileReferences = fileManager.cleanupAndGetValidFiles();
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        }
+        validFileReferences = fileManager.cleanupAndGetValidFiles();
         for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
             LSMRTreeDiskComponent component;
-            try {
-                component = createDiskComponent(componentFactory,
-                        lsmComonentFileReference.getInsertIndexFileReference(), null, null, false);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
+            component = createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
+                    null, null, false);
             immutableComponents.add(component);
         }
         isActivated = true;
@@ -175,7 +166,7 @@
     }
 
     @Override
-    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
         LSMRTreeFlushOperation flushOp = (LSMRTreeFlushOperation) operation;
         // Renaming order is critical because we assume ordering when we
         // read the file names when we open the tree.
@@ -268,7 +259,7 @@
 
     @Override
     public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         LSMRTreeOpContext rctx = createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         rctx.setOperation(IndexOperation.MERGE);
         List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
@@ -285,7 +276,7 @@
     }
 
     @Override
-    public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+    public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
         LSMRTreeMergeOperation mergeOp = (LSMRTreeMergeOperation) operation;
         ITreeIndexCursor cursor = mergeOp.getCursor();
         ISearchPredicate rtreeSearchPred = new SearchPredicate(null, null);
@@ -346,13 +337,8 @@
 
     @Override
     public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws TreeIndexException {
-        try {
-            return new LSMRTreeWithAntiMatterTuplesBulkLoader(fillLevel, verifyInput, numElementsHint,
-                    checkIfEmptyIndex);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+            boolean checkIfEmptyIndex) throws HyracksDataException {
+        return new LSMRTreeWithAntiMatterTuplesBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex);
     }
 
     public class LSMRTreeWithAntiMatterTuplesBulkLoader implements IIndexBulkLoader {
@@ -365,17 +351,14 @@
         public final MultiComparator filterCmp;
 
         public LSMRTreeWithAntiMatterTuplesBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-                boolean checkIfEmptyIndex) throws TreeIndexException, HyracksDataException {
+                boolean checkIfEmptyIndex) throws HyracksDataException {
             if (checkIfEmptyIndex && !isEmptyIndex()) {
-                throw new TreeIndexException("Cannot load an index that is not empty");
+                throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
             }
             // Note that by using a flush target file name, we state that the
             // new bulk loaded tree is "newer" than any other merged tree.
-            try {
-                component = createBulkLoadTarget();
-            } catch (HyracksDataException | IndexException e) {
-                throw new TreeIndexException(e);
-            }
+
+            component = createBulkLoadTarget();
             bulkLoader = ((LSMRTreeDiskComponent) component).getRTree().createBulkLoader(fillFactor, verifyInput,
                     numElementsHint, false);
 
@@ -391,7 +374,7 @@
         }
 
         @Override
-        public void add(ITupleReference tuple) throws HyracksDataException, IndexException {
+        public void add(ITupleReference tuple) throws HyracksDataException {
             try {
                 ITupleReference t;
                 if (indexTuple != null) {
@@ -408,7 +391,7 @@
                     component.getLSMComponentFilter().update(filterTuple, filterCmp);
                 }
 
-            } catch (IndexException | HyracksDataException | RuntimeException e) {
+            } catch (Exception e) {
                 cleanupArtifacts();
                 throw e;
             }
@@ -418,7 +401,7 @@
         }
 
         @Override
-        public void end() throws HyracksDataException, IndexException {
+        public void end() throws HyracksDataException {
             if (!cleanedUpArtifacts) {
 
                 if (component.getLSMComponentFilter() != null) {
@@ -451,7 +434,7 @@
             }
         }
 
-        private ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
+        private ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException {
             LSMComponentFileReferences relFlushFileRefs = fileManager.getRelFlushFileReference();
             return createDiskComponent(bulkLoaComponentFactory, relFlushFileRefs.getInsertIndexFileReference(), null,
                     null, true);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesSearchCursor.java
index a913b81..98ab803 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesSearchCursor.java
@@ -29,7 +29,6 @@
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
@@ -68,8 +67,7 @@
     }
 
     @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException,
-            IndexException {
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
         LSMRTreeCursorInitialState lsmInitialState = (LSMRTreeCursorInitialState) initialState;
         cmp = lsmInitialState.getHilbertCmp();
         btreeCmp = lsmInitialState.getBTreeCmp();
@@ -99,16 +97,16 @@
         btreeAccessors = new ITreeIndexAccessor[numMutableComponents];
         for (int i = 0; i < numMutableComponents; i++) {
             ILSMComponent component = operationalComponents.get(i);
-            RTree rtree = (RTree) ((LSMRTreeMemoryComponent) component).getRTree();
-            BTree btree = (BTree) ((LSMRTreeMemoryComponent) component).getBTree();
-            mutableRTreeCursors[i] = new RTreeSearchCursor((IRTreeInteriorFrame) lsmInitialState
-                    .getRTreeInteriorFrameFactory().createFrame(), (IRTreeLeafFrame) lsmInitialState
-                    .getRTreeLeafFrameFactory().createFrame());
-            btreeCursors[i] = new BTreeRangeSearchCursor((IBTreeLeafFrame) lsmInitialState.getBTreeLeafFrameFactory()
-                    .createFrame(), false);
+            RTree rtree = ((LSMRTreeMemoryComponent) component).getRTree();
+            BTree btree = ((LSMRTreeMemoryComponent) component).getBTree();
+            mutableRTreeCursors[i] = new RTreeSearchCursor(
+                    (IRTreeInteriorFrame) lsmInitialState.getRTreeInteriorFrameFactory().createFrame(),
+                    (IRTreeLeafFrame) lsmInitialState.getRTreeLeafFrameFactory().createFrame());
+            btreeCursors[i] = new BTreeRangeSearchCursor(
+                    (IBTreeLeafFrame) lsmInitialState.getBTreeLeafFrameFactory().createFrame(), false);
             btreeAccessors[i] = btree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-            mutableRTreeAccessors[i] = rtree.createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
+            mutableRTreeAccessors[i] =
+                    rtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         }
 
         rangeCursors = new RTreeSearchCursor[numImmutableComponents];
@@ -116,12 +114,12 @@
         int j = 0;
         for (int i = numMutableComponents; i < operationalComponents.size(); i++) {
             ILSMComponent component = operationalComponents.get(i);
-            rangeCursors[j] = new RTreeSearchCursor((IRTreeInteriorFrame) lsmInitialState
-                    .getRTreeInteriorFrameFactory().createFrame(), (IRTreeLeafFrame) lsmInitialState
-                    .getRTreeLeafFrameFactory().createFrame());
-            RTree rtree = (RTree) ((LSMRTreeDiskComponent) component).getRTree();
-            immutableRTreeAccessors[j] = rtree.createAccessor(NoOpOperationCallback.INSTANCE,
-                    NoOpOperationCallback.INSTANCE);
+            rangeCursors[j] = new RTreeSearchCursor(
+                    (IRTreeInteriorFrame) lsmInitialState.getRTreeInteriorFrameFactory().createFrame(),
+                    (IRTreeLeafFrame) lsmInitialState.getRTreeLeafFrameFactory().createFrame());
+            RTree rtree = ((LSMRTreeDiskComponent) component).getRTree();
+            immutableRTreeAccessors[j] =
+                    rtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
             immutableRTreeAccessors[j].search(rangeCursors[j], searchPred);
             j++;
         }
@@ -131,7 +129,7 @@
         open = true;
     }
 
-    private void searchNextCursor() throws HyracksDataException, IndexException {
+    private void searchNextCursor() throws HyracksDataException {
         if (currentCursor < numMutableComponents) {
             mutableRTreeCursors[currentCursor].reset();
             mutableRTreeAccessors[currentCursor].search(mutableRTreeCursors[currentCursor], rtreeSearchPredicate);
@@ -139,7 +137,7 @@
     }
 
     @Override
-    public boolean hasNext() throws HyracksDataException, IndexException {
+    public boolean hasNext() throws HyracksDataException {
         if (includeMutableComponent) {
             if (foundNext) {
                 return true;
@@ -196,7 +194,7 @@
     }
 
     @Override
-    public void reset() throws HyracksDataException, IndexException {
+    public void reset() throws HyracksDataException {
         if (!open) {
             return;
         }
@@ -233,8 +231,7 @@
         return cmp.selectiveFieldCompare(tupleA, tupleB, comparatorFields);
     }
 
-    private boolean searchMemBTrees(ITupleReference tuple, int lastBTreeToSearch) throws HyracksDataException,
-            IndexException {
+    private boolean searchMemBTrees(ITupleReference tuple, int lastBTreeToSearch) throws HyracksDataException {
         for (int i = 0; i < lastBTreeToSearch; i++) {
             btreeCursors[i].reset();
             btreeRangePredicate.setHighKey(tuple, true);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
index 624c7de..12226cf 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
@@ -24,6 +24,7 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
@@ -37,7 +38,6 @@
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -72,33 +72,32 @@
 
 public class LSMRTreeUtils {
     public static LSMRTree createLSMTree(IIOManager ioManager, List<IVirtualBufferCache> virtualBufferCaches,
-            FileReference file,
-            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback,
+            FileReference file, IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider,
+            ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
+            IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
+            RTreePolicyType rtreePolicyType, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
+            ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback,
             ILinearizeComparatorFactory linearizeCmpFactory, int[] rtreeFields, int[] buddyBTreeFields,
             ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields,
             boolean durable, boolean isPointMBR, IMetadataPageManagerFactory freePageManagerFactory)
-            throws TreeIndexException, HyracksDataException {
+            throws HyracksDataException {
         int valueFieldCount = buddyBTreeFields.length;
         int keyFieldCount = typeTraits.length - valueFieldCount;
         ITypeTraits[] btreeTypeTraits = new ITypeTraits[valueFieldCount];
         for (int i = 0; i < valueFieldCount; i++) {
             btreeTypeTraits[i] = typeTraits[buddyBTreeFields[i]];
         }
-        ITreeIndexTupleWriterFactory rtreeInteriorFrameTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(
-                typeTraits, false);
+        ITreeIndexTupleWriterFactory rtreeInteriorFrameTupleWriterFactory =
+                new LSMTypeAwareTupleWriterFactory(typeTraits, false);
         ITreeIndexTupleWriterFactory rtreeLeafFrameTupleWriterFactory = null;
         if (isPointMBR) {
-            rtreeLeafFrameTupleWriterFactory = new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount,
-                    valueFieldCount, false);
+            rtreeLeafFrameTupleWriterFactory =
+                    new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount, valueFieldCount, false);
         } else {
             rtreeLeafFrameTupleWriterFactory = rtreeInteriorFrameTupleWriterFactory;
         }
-        ITreeIndexTupleWriterFactory btreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(btreeTypeTraits,
-                true);
+        ITreeIndexTupleWriterFactory btreeTupleWriterFactory =
+                new LSMTypeAwareTupleWriterFactory(btreeTypeTraits, true);
         ITreeIndexFrameFactory rtreeInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
                 rtreeInteriorFrameTupleWriterFactory, valueProviderFactories, rtreePolicyType, isPointMBR);
         ITreeIndexFrameFactory rtreeLeafFrameFactory = new RTreeNSMLeafFrameFactory(rtreeLeafFrameTupleWriterFactory,
@@ -108,9 +107,9 @@
         TreeIndexFactory<RTree> diskRTreeFactory = new RTreeFactory(ioManager, diskBufferCache, diskFileMapProvider,
                 freePageManagerFactory, rtreeInteriorFrameFactory, rtreeLeafFrameFactory, rtreeCmpFactories,
                 typeTraits.length, isPointMBR);
-        TreeIndexFactory<BTree> diskBTreeFactory = new BTreeFactory(ioManager, diskBufferCache, diskFileMapProvider,
-                freePageManagerFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, btreeCmpFactories,
-                btreeTypeTraits.length);
+        TreeIndexFactory<BTree> diskBTreeFactory =
+                new BTreeFactory(ioManager, diskBufferCache, diskFileMapProvider, freePageManagerFactory,
+                        btreeInteriorFrameFactory, btreeLeafFrameFactory, btreeCmpFactories, btreeTypeTraits.length);
 
         int[] comparatorFields = { 0 };
         IBinaryComparatorFactory[] linearizerArray = { linearizeCmpFactory };
@@ -119,8 +118,8 @@
         for (int i = 0; i < btreeCmpFactories.length; i++) {
             bloomFilterKeyFields[i] = i;
         }
-        BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, diskFileMapProvider,
-                bloomFilterKeyFields);
+        BloomFilterFactory bloomFilterFactory =
+                new BloomFilterFactory(diskBufferCache, diskFileMapProvider, bloomFilterKeyFields);
 
         LSMComponentFilterFactory filterFactory = null;
         LSMComponentFilterFrameFactory filterFrameFactory = null;
@@ -131,16 +130,14 @@
             filterFrameFactory = new LSMComponentFilterFrameFactory(filterTupleWriterFactory);
             filterManager = new LSMComponentFilterManager(filterFrameFactory);
         }
-        ILSMIndexFileManager fileNameManager = new LSMRTreeFileManager(ioManager, diskFileMapProvider, file,
-                diskRTreeFactory,
-                diskBTreeFactory);
+        ILSMIndexFileManager fileNameManager =
+                new LSMRTreeFileManager(ioManager, diskFileMapProvider, file, diskRTreeFactory, diskBTreeFactory);
         LSMRTree lsmTree = new LSMRTree(ioManager, virtualBufferCaches, rtreeInteriorFrameFactory,
-                rtreeLeafFrameFactory,
-                btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager, diskRTreeFactory, diskBTreeFactory,
-                bloomFilterFactory, filterFactory, filterFrameFactory, filterManager, bloomFilterFalsePositiveRate,
-                diskFileMapProvider, typeTraits.length, rtreeCmpFactories, btreeCmpFactories, linearizeCmpFactory,
-                comparatorFields, linearizerArray, mergePolicy, opTracker, ioScheduler, ioOpCallback, rtreeFields,
-                buddyBTreeFields, filterFields, durable, isPointMBR);
+                rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
+                diskRTreeFactory, diskBTreeFactory, bloomFilterFactory, filterFactory, filterFrameFactory,
+                filterManager, bloomFilterFalsePositiveRate, diskFileMapProvider, typeTraits.length, rtreeCmpFactories,
+                btreeCmpFactories, linearizeCmpFactory, comparatorFields, linearizerArray, mergePolicy, opTracker,
+                ioScheduler, ioOpCallback, rtreeFields, buddyBTreeFields, filterFields, durable, isPointMBR);
         return lsmTree;
     }
 
@@ -153,18 +150,18 @@
             ILSMIOOperationCallback ioOpCallback, ILinearizeComparatorFactory linearizerCmpFactory, int[] rtreeFields,
             ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields,
             boolean durable, boolean isPointMBR, IMetadataPageManagerFactory freePageManagerFactory)
-            throws TreeIndexException, HyracksDataException {
-        ITreeIndexTupleWriterFactory rtreeInteriorFrameTupleWriterFactory = new LSMRTreeTupleWriterFactory(typeTraits,
-                false);
+            throws HyracksDataException {
+        ITreeIndexTupleWriterFactory rtreeInteriorFrameTupleWriterFactory =
+                new LSMRTreeTupleWriterFactory(typeTraits, false);
         ITreeIndexTupleWriterFactory rtreeLeafFrameTupleWriterFactory;
         ITreeIndexTupleWriterFactory rtreeLeafFrameCopyTupleWriterFactory;
         if (isPointMBR) {
             int keyFieldCount = rtreeCmpFactories.length;
             int valueFieldCount = btreeCmpFactories.length - keyFieldCount;
-            rtreeLeafFrameTupleWriterFactory = new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount,
-                    valueFieldCount, true);
-            rtreeLeafFrameCopyTupleWriterFactory = new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount,
-                    valueFieldCount, true);
+            rtreeLeafFrameTupleWriterFactory =
+                    new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount, valueFieldCount, true);
+            rtreeLeafFrameCopyTupleWriterFactory =
+                    new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount, valueFieldCount, true);
 
         } else {
             rtreeLeafFrameTupleWriterFactory = new LSMRTreeTupleWriterFactory(typeTraits, false);
@@ -193,8 +190,8 @@
         // The first field is for the sorted curve (e.g. Hilbert curve), and the
         // second field is for the primary key.
         int[] comparatorFields = new int[btreeCmpFactories.length - rtreeCmpFactories.length + 1];
-        IBinaryComparatorFactory[] linearizerArray = new IBinaryComparatorFactory[btreeCmpFactories.length
-                - rtreeCmpFactories.length + 1];
+        IBinaryComparatorFactory[] linearizerArray =
+                new IBinaryComparatorFactory[btreeCmpFactories.length - rtreeCmpFactories.length + 1];
 
         comparatorFields[0] = 0;
         for (int i = 1; i < comparatorFields.length; i++) {
@@ -216,9 +213,8 @@
             filterFrameFactory = new LSMComponentFilterFrameFactory(filterTupleWriterFactory);
             filterManager = new LSMComponentFilterManager(filterFrameFactory);
         }
-        ILSMIndexFileManager fileNameManager = new LSMRTreeWithAntiMatterTuplesFileManager(ioManager,
-                diskFileMapProvider, file,
-                diskRTreeFactory);
+        ILSMIndexFileManager fileNameManager =
+                new LSMRTreeWithAntiMatterTuplesFileManager(ioManager, diskFileMapProvider, file, diskRTreeFactory);
         LSMRTreeWithAntiMatterTuples lsmTree = new LSMRTreeWithAntiMatterTuples(ioManager, virtualBufferCaches,
                 rtreeInteriorFrameFactory, rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory,
                 fileNameManager, diskRTreeFactory, bulkLoadRTreeFactory, filterFactory, filterFrameFactory,
@@ -229,15 +225,14 @@
     }
 
     public static ExternalRTree createExternalRTree(IIOManager ioManager, FileReference file,
-            IBufferCache diskBufferCache,
-            IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
+            IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
             IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
             IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
             double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
             ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback,
             ILinearizeComparatorFactory linearizeCmpFactory, int[] buddyBTreeFields, int startWithVersion,
             boolean durable, boolean isPointMBR, IMetadataPageManagerFactory freePageManagerFactory)
-            throws TreeIndexException {
+            throws HyracksDataException {
 
         int keyFieldCount = rtreeCmpFactories.length;
         int valueFieldCount = typeTraits.length - keyFieldCount;
@@ -245,17 +240,17 @@
         for (int i = 0; i < buddyBTreeFields.length; i++) {
             btreeTypeTraits[i] = typeTraits[buddyBTreeFields[i]];
         }
-        ITreeIndexTupleWriterFactory rtreeInteriorFrameTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(
-                typeTraits, false);
+        ITreeIndexTupleWriterFactory rtreeInteriorFrameTupleWriterFactory =
+                new LSMTypeAwareTupleWriterFactory(typeTraits, false);
         ITreeIndexTupleWriterFactory rtreeLeafFrameTupleWriterFactory = null;
         if (isPointMBR) {
-            rtreeLeafFrameTupleWriterFactory = new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount,
-                    valueFieldCount, false);
+            rtreeLeafFrameTupleWriterFactory =
+                    new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount, valueFieldCount, false);
         } else {
             rtreeLeafFrameTupleWriterFactory = rtreeInteriorFrameTupleWriterFactory;
         }
-        ITreeIndexTupleWriterFactory btreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(btreeTypeTraits,
-                true);
+        ITreeIndexTupleWriterFactory btreeTupleWriterFactory =
+                new LSMTypeAwareTupleWriterFactory(btreeTypeTraits, true);
         ITreeIndexFrameFactory rtreeInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
                 rtreeInteriorFrameTupleWriterFactory, valueProviderFactories, rtreePolicyType, isPointMBR);
         ITreeIndexFrameFactory rtreeLeafFrameFactory = new RTreeNSMLeafFrameFactory(rtreeLeafFrameTupleWriterFactory,
@@ -265,9 +260,9 @@
         TreeIndexFactory<RTree> diskRTreeFactory = new RTreeFactory(ioManager, diskBufferCache, diskFileMapProvider,
                 freePageManagerFactory, rtreeInteriorFrameFactory, rtreeLeafFrameFactory, rtreeCmpFactories,
                 typeTraits.length, isPointMBR);
-        TreeIndexFactory<BTree> diskBTreeFactory = new BTreeFactory(ioManager, diskBufferCache, diskFileMapProvider,
-                freePageManagerFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, btreeCmpFactories,
-                btreeTypeTraits.length);
+        TreeIndexFactory<BTree> diskBTreeFactory =
+                new BTreeFactory(ioManager, diskBufferCache, diskFileMapProvider, freePageManagerFactory,
+                        btreeInteriorFrameFactory, btreeLeafFrameFactory, btreeCmpFactories, btreeTypeTraits.length);
         int[] comparatorFields = { 0 };
         IBinaryComparatorFactory[] linearizerArray = { linearizeCmpFactory };
 
@@ -275,12 +270,11 @@
         for (int i = 0; i < btreeCmpFactories.length; i++) {
             bloomFilterKeyFields[i] = i;
         }
-        BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, diskFileMapProvider,
-                bloomFilterKeyFields);
+        BloomFilterFactory bloomFilterFactory =
+                new BloomFilterFactory(diskBufferCache, diskFileMapProvider, bloomFilterKeyFields);
 
-        ILSMIndexFileManager fileNameManager = new LSMRTreeFileManager(ioManager, diskFileMapProvider, file,
-                diskRTreeFactory,
-                diskBTreeFactory);
+        ILSMIndexFileManager fileNameManager =
+                new LSMRTreeFileManager(ioManager, diskFileMapProvider, file, diskRTreeFactory, diskBTreeFactory);
         ExternalRTree lsmTree = new ExternalRTree(ioManager, rtreeInteriorFrameFactory, rtreeLeafFrameFactory,
                 btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager, diskRTreeFactory, diskBTreeFactory,
                 bloomFilterFactory, bloomFilterFalsePositiveRate, diskFileMapProvider, typeTraits.length,
@@ -291,10 +285,10 @@
     }
 
     public static ILinearizeComparatorFactory proposeBestLinearizer(ITypeTraits[] typeTraits, int numKeyFields)
-            throws TreeIndexException {
+            throws HyracksDataException {
         for (int i = 0; i < numKeyFields; i++) {
             if (!(typeTraits[i].getClass().equals(typeTraits[0].getClass()))) {
-                throw new TreeIndexException("Cannot propose linearizer if dimensions have different types");
+                throw HyracksDataException.create(ErrorCode.CANNOT_PROPOSE_LINEARIZER_DIFF_DIMENSIONS);
             }
         }
 
@@ -306,6 +300,7 @@
             return new ZCurveIntComparatorFactory(numKeyFields / 2);
         }
 
-        throw new TreeIndexException("Cannot propose linearizer");
+        throw HyracksDataException.create(ErrorCode.CANNOT_PROPOSE_LINEARIZER_FOR_TYPE,
+                typeTraits[0].getClass().getSimpleName());
     }
 }
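
A minimal caller-side sketch of the coded failure path introduced above, assuming typeTraits and numKeyFields are in scope and using only the error codes shown in this hunk; the helper name is hypothetical:

    // Hypothetical helper: ask for the best linearizer and translate the two
    // new error codes into a "none available" result; rethrow anything else.
    static ILinearizeComparatorFactory tryProposeLinearizer(ITypeTraits[] typeTraits, int numKeyFields)
            throws HyracksDataException {
        try {
            return LSMRTreeUtils.proposeBestLinearizer(typeTraits, numKeyFields);
        } catch (HyracksDataException e) {
            if (e.getErrorCode() == ErrorCode.CANNOT_PROPOSE_LINEARIZER_DIFF_DIMENSIONS
                    || e.getErrorCode() == ErrorCode.CANNOT_PROPOSE_LINEARIZER_FOR_TYPE) {
                return null; // no space-filling-curve comparator fits these key types
            }
            throw e;
        }
    }
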
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
index 892b715..b941989 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
@@ -21,7 +21,6 @@
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.rtree.impls.PathList;
 
@@ -41,7 +40,7 @@
     public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentIndex, MultiComparator cmp)
             throws HyracksDataException;
 
-    public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws TreeIndexException;
+    public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws HyracksDataException;
 
     public void enlarge(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException;
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
index efdb67f..e24ab6b 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
@@ -23,6 +23,7 @@
 import java.util.Collections;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
@@ -30,7 +31,6 @@
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.frames.AbstractSlotManager;
 import org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -41,12 +41,12 @@
 public class RTreeNSMInteriorFrame extends RTreeNSMFrame implements IRTreeInteriorFrame {
 
     public static final int childPtrSize = 4;
-    private IBinaryComparator childPtrCmp = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY)
-            .createBinaryComparator();
+    private IBinaryComparator childPtrCmp =
+            PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
     private final int keyFieldCount;
 
     public RTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders,
-                                 RTreePolicyType rtreePolicyType, boolean isPointMBR) {
+            RTreePolicyType rtreePolicyType, boolean isPointMBR) {
         super(tupleWriter, keyValueProviders, rtreePolicyType, isPointMBR);
         keyFieldCount = keyValueProviders.length;
         frameTuple.setFieldCount(keyFieldCount);
@@ -130,7 +130,8 @@
             if (c == 0) {
                 return i;
             } else {
-                int pageId = IntegerPointable.getInteger(frameTuple.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(frameTuple));
+                int pageId = IntegerPointable.getInteger(frameTuple.getFieldData(cmp.getKeyFieldCount() - 1),
+                        getChildPointerOff(frameTuple));
                 traverseList.add(pageId, -1, parentIndex);
             }
         }
@@ -197,19 +198,15 @@
     }
 
     @Override
-    public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws TreeIndexException {
+    public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws HyracksDataException {
         frameTuple.setFieldCount(cmp.getKeyFieldCount());
         if (tupleIndex == -1) {
-            try {
-                tupleIndex = findTupleByPointer(tuple, cmp);
-            } catch (HyracksDataException e) {
-                throw new TreeIndexException(e);
-            }
+            tupleIndex = findTupleByPointer(tuple, cmp);
         }
         if (tupleIndex != -1) {
             tupleWriter.writeTuple(tuple, buf.array(), getTupleOffset(tupleIndex));
         } else {
-            throw new TreeIndexException("Error: Faild to find a tuple in a page");
+            throw HyracksDataException.create(ErrorCode.FAILED_TO_FIND_TUPLE);
 
         }
 
@@ -217,9 +214,9 @@
 
     protected int pointerCmp(ITupleReference tupleA, ITupleReference tupleB, MultiComparator cmp)
             throws HyracksDataException {
-        return childPtrCmp
-                .compare(tupleA.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tupleA), childPtrSize,
-                        tupleB.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tupleB), childPtrSize);
+        return childPtrCmp.compare(tupleA.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tupleA),
+                childPtrSize, tupleB.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tupleB),
+                childPtrSize);
     }
 
     @Override
@@ -243,8 +240,8 @@
 
         buf.putInt(Constants.TUPLE_COUNT_OFFSET, buf.getInt(Constants.TUPLE_COUNT_OFFSET) + 1);
         buf.putInt(Constants.FREE_SPACE_OFFSET, buf.getInt(Constants.FREE_SPACE_OFFSET) + tupleSize);
-        buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) - tupleSize - slotManager
-                .getSlotSize());
+        buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+                buf.getInt(TOTAL_FREE_SPACE_OFFSET) - tupleSize - slotManager.getSlotSize());
 
     }
 
@@ -298,8 +295,9 @@
         for (int i = 0; i < tupleCount; i++) {
             int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
             frameTuple.resetByTupleOffset(buf.array(), tupleOff);
-            int intVal = IntegerPointable.getInteger(buf.array(), frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
-            + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
+            int intVal =
+                    IntegerPointable.getInteger(buf.array(), frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+                            + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
             ret.add(intVal);
         }
         return ret;
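
For reference, a hedged sketch of the child-pointer layout the accessors above rely on (frameTuple and buf as in this class): each interior tuple stores its key fields followed by a 4-byte child page id, so the pointer begins where the last field ends.

    // Read the child page id stored childPtrSize (4) bytes past the last key field.
    int lastField = frameTuple.getFieldCount() - 1;
    int childPtrOff = frameTuple.getFieldStart(lastField) + frameTuple.getFieldLength(lastField);
    int childPageId = IntegerPointable.getInteger(buf.array(), childPtrOff);
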
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTree.java
index 9cd16c4..7b2ed48 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTree.java
@@ -26,6 +26,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -41,8 +42,6 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.frames.AbstractSlotManager;
 import org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
 import org.apache.hyracks.storage.am.common.impls.AbstractTreeIndex;
@@ -159,63 +158,6 @@
                 modificationCallback);
     }
 
-    private void insert(ITupleReference tuple, IIndexOperationContext ictx)
-            throws HyracksDataException, TreeIndexException {
-        RTreeOpContext ctx = (RTreeOpContext) ictx;
-        int tupleSize = Math.max(ctx.leafFrame.getBytesRequiredToWriteTuple(tuple),
-                ctx.interiorFrame.getBytesRequiredToWriteTuple(tuple));
-        if (tupleSize > maxTupleSize) {
-            throw new TreeIndexException("Record size (" + tupleSize + ") larger than maximum acceptable record size ("
-                    + maxTupleSize + ")");
-        }
-        ctx.reset();
-        ctx.setTuple(tuple);
-        ctx.splitKey.reset();
-        ctx.splitKey.getLeftTuple().setFieldCount(cmpFactories.length);
-        ctx.splitKey.getRightTuple().setFieldCount(cmpFactories.length);
-        ctx.modificationCallback.before(tuple);
-
-        int maxFieldPos = cmpFactories.length / 2;
-        for (int i = 0; i < maxFieldPos; i++) {
-            int j = maxFieldPos + i;
-            int c = ctx.cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
-            if (c > 0) {
-                throw new IllegalArgumentException("The low key point has larger coordinates than the high key point.");
-            }
-        }
-
-        try {
-            ICachedPage leafNode = findLeaf(ctx);
-
-            int pageId = ctx.pathList.getLastPageId();
-            ctx.pathList.moveLast();
-            insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
-
-            while (true) {
-                if (ctx.splitKey.getLeftPageBuffer() != null) {
-                    updateParentForInsert(ctx);
-                } else {
-                    break;
-                }
-            }
-        } finally {
-            for (int i = ctx.NSNUpdates.size() - 1; i >= 0; i--) {
-                ICachedPage node = ctx.NSNUpdates.get(i);
-                ctx.interiorFrame.setPage(node);
-                ctx.interiorFrame.setPageNsn(incrementGlobalNsn());
-            }
-
-            for (int i = ctx.LSNUpdates.size() - 1; i >= 0; i--) {
-                ICachedPage node = ctx.LSNUpdates.get(i);
-                ctx.interiorFrame.setPage(node);
-                ctx.interiorFrame.setPageLsn(incrementGlobalNsn());
-                node.releaseWriteLatch(true);
-                bufferCache.unpin(node);
-            }
-        }
-    }
-
     private ICachedPage findLeaf(RTreeOpContext ctx) throws HyracksDataException {
         int pageId = rootPage;
         boolean writeLatched = false;
@@ -342,7 +284,7 @@
     }
 
     private void insertTuple(ICachedPage node, int pageId, ITupleReference tuple, RTreeOpContext ctx, boolean isLeaf)
-            throws HyracksDataException, TreeIndexException {
+            throws HyracksDataException {
         boolean succeeded = false;
         FrameOpSpaceStatus spaceStatus;
         if (!isLeaf) {
@@ -412,8 +354,7 @@
                         rightFrame.setPage(rightNode);
                         rightFrame.initBuffer(ctx.interiorFrame.getLevel());
                         rightFrame.setRightPage(ctx.interiorFrame.getRightPage());
-                        ctx.interiorFrame.split(rightFrame, tuple, ctx.splitKey, ctx,
-                                bufferCache);
+                        ctx.interiorFrame.split(rightFrame, tuple, ctx.splitKey, ctx, bufferCache);
                         ctx.interiorFrame.setRightPage(rightPageId);
                     } else {
                         rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
@@ -421,8 +362,7 @@
                         rightFrame.initBuffer((byte) 0);
                         rightFrame.setRightPage(ctx.interiorFrame.getRightPage());
                         ctx.modificationCallback.found(null, tuple);
-                        ctx.leafFrame.split(rightFrame, tuple, ctx.splitKey, ctx,
-                                bufferCache);
+                        ctx.leafFrame.split(rightFrame, tuple, ctx.splitKey, ctx, bufferCache);
                         ctx.leafFrame.setRightPage(rightPageId);
                     }
                     succeeded = true;
@@ -448,8 +388,8 @@
                 ctx.splitKey.setPages(pageId, rightPageId);
                 if (pageId == rootPage) {
                     int newLeftId = freePageManager.takePage(ctx.metaFrame);
-                    ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId),
-                            true);
+                    ICachedPage newLeftNode =
+                            bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId), true);
                     newLeftNode.acquireWriteLatch();
                     succeeded = false;
                     try {
@@ -503,7 +443,7 @@
         }
     }
 
-    private void updateParentForInsert(RTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+    private void updateParentForInsert(RTreeOpContext ctx) throws HyracksDataException {
         boolean succeeded = false;
         boolean writeLatched = false;
         int parentId = ctx.pathList.getLastPageId();
@@ -542,7 +482,7 @@
             if (foundParent) {
                 try {
                     ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, ctx.cmp);
-                } catch (TreeIndexException e) {
+                } catch (Exception e) {
                     if (writeLatched) {
                         parentNode.releaseWriteLatch(true);
                         writeLatched = false;
@@ -571,7 +511,7 @@
         updateParentForInsert(ctx);
     }
 
-    private void findPath(RTreeOpContext ctx) throws TreeIndexException, HyracksDataException {
+    private void findPath(RTreeOpContext ctx) throws HyracksDataException {
         boolean readLatched = false;
         int pageId = rootPage;
         int parentIndex = -1;
@@ -596,7 +536,7 @@
                 ctx.traverseList.moveFirst();
 
                 if (ctx.interiorFrame.isLeaf()) {
-                    throw new TreeIndexException("Error: Failed to re-find parent of a page in the tree.");
+                    throw HyracksDataException.create(ErrorCode.FAILED_TO_RE_FIND_PARENT);
                 }
 
                 if (pageId != rootPage) {
@@ -635,7 +575,7 @@
         }
     }
 
-    private void delete(ITupleReference tuple, RTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+    private void delete(ITupleReference tuple, RTreeOpContext ctx) throws HyracksDataException {
         ctx.reset();
         ctx.setTuple(tuple);
         ctx.splitKey.reset();
@@ -764,7 +704,7 @@
     }
 
     private void search(ITreeIndexCursor cursor, ISearchPredicate searchPred, RTreeOpContext ctx)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         ctx.reset();
         ctx.cursor = cursor;
 
@@ -828,19 +768,19 @@
         }
 
         @Override
-        public void insert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+        public void insert(ITupleReference tuple) throws HyracksDataException {
             ctx.setOperation(IndexOperation.INSERT);
-            rtree.insert(tuple, ctx);
+            insert(tuple, ctx);
         }
 
         @Override
-        public void update(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+        public void update(ITupleReference tuple) throws HyracksDataException {
             ctx.setOperation(IndexOperation.UPDATE);
             rtree.update(tuple, ctx);
         }
 
         @Override
-        public void delete(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+        public void delete(ITupleReference tuple) throws HyracksDataException {
             ctx.setOperation(IndexOperation.DELETE);
             rtree.delete(tuple, ctx);
         }
@@ -852,8 +792,7 @@
         }
 
         @Override
-        public void search(IIndexCursor cursor, ISearchPredicate searchPred)
-                throws HyracksDataException, IndexException {
+        public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
             ctx.setOperation(IndexOperation.SEARCH);
             rtree.search((ITreeIndexCursor) cursor, searchPred, ctx);
         }
@@ -874,45 +813,97 @@
         }
 
         @Override
-        public void upsert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+        public void upsert(ITupleReference tuple) throws HyracksDataException {
             throw new UnsupportedOperationException(
                     "The RTree does not support the notion of keys, therefore upsert does not make sense.");
         }
+
+        private void insert(ITupleReference tuple, IIndexOperationContext ictx) throws HyracksDataException {
+            RTreeOpContext ctx = (RTreeOpContext) ictx;
+            int tupleSize = Math.max(ctx.leafFrame.getBytesRequiredToWriteTuple(tuple),
+                    ctx.interiorFrame.getBytesRequiredToWriteTuple(tuple));
+            if (tupleSize > maxTupleSize) {
+                throw HyracksDataException.create(ErrorCode.RECORD_IS_TOO_LARGE, tupleSize, maxTupleSize);
+            }
+            ctx.reset();
+            ctx.setTuple(tuple);
+            ctx.splitKey.reset();
+            ctx.splitKey.getLeftTuple().setFieldCount(cmpFactories.length);
+            ctx.splitKey.getRightTuple().setFieldCount(cmpFactories.length);
+            ctx.modificationCallback.before(tuple);
+
+            int maxFieldPos = cmpFactories.length / 2;
+            for (int i = 0; i < maxFieldPos; i++) {
+                int j = maxFieldPos + i;
+                int c = ctx.cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+                        tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j),
+                        tuple.getFieldLength(j));
+                if (c > 0) {
+                    throw new IllegalArgumentException(
+                            "The low key point has larger coordinates than the high key point.");
+                }
+            }
+
+            try {
+                ICachedPage leafNode = findLeaf(ctx);
+
+                int pageId = ctx.pathList.getLastPageId();
+                ctx.pathList.moveLast();
+                insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
+
+                while (true) {
+                    if (ctx.splitKey.getLeftPageBuffer() != null) {
+                        updateParentForInsert(ctx);
+                    } else {
+                        break;
+                    }
+                }
+            } finally {
+                for (int i = ctx.NSNUpdates.size() - 1; i >= 0; i--) {
+                    ICachedPage node = ctx.NSNUpdates.get(i);
+                    ctx.interiorFrame.setPage(node);
+                    ctx.interiorFrame.setPageNsn(incrementGlobalNsn());
+                }
+
+                for (int i = ctx.LSNUpdates.size() - 1; i >= 0; i--) {
+                    ICachedPage node = ctx.LSNUpdates.get(i);
+                    ctx.interiorFrame.setPage(node);
+                    ctx.interiorFrame.setPageLsn(incrementGlobalNsn());
+                    node.releaseWriteLatch(true);
+                    bufferCache.unpin(node);
+                }
+            }
+        }
     }
 
     @Override
     public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
-            boolean checkIfEmptyIndex) throws TreeIndexException {
+            boolean checkIfEmptyIndex) throws HyracksDataException {
         // TODO: verifyInput currently does nothing.
-        try {
-            return new RTreeBulkLoader(fillFactor);
-        } catch (HyracksDataException e) {
-            throw new TreeIndexException(e);
-        }
+        return new RTreeBulkLoader(fillFactor);
     }
 
     public class RTreeBulkLoader extends AbstractTreeIndex.AbstractTreeIndexBulkLoader {
         ITreeIndexFrame lowerFrame, prevInteriorFrame;
-        RTreeTypeAwareTupleWriter interiorFrameTupleWriter = ((RTreeTypeAwareTupleWriter) interiorFrame
-                .getTupleWriter());
+        RTreeTypeAwareTupleWriter interiorFrameTupleWriter =
+                ((RTreeTypeAwareTupleWriter) interiorFrame.getTupleWriter());
         ITreeIndexTupleReference mbrTuple = interiorFrame.createTupleReference();
         ByteBuffer mbr;
         List<Integer> prevNodeFrontierPages = new ArrayList<>();
 
-        public RTreeBulkLoader(float fillFactor) throws TreeIndexException, HyracksDataException {
+        public RTreeBulkLoader(float fillFactor) throws HyracksDataException {
             super(fillFactor);
             prevInteriorFrame = interiorFrameFactory.createFrame();
         }
 
         @Override
-        public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+        public void add(ITupleReference tuple) throws HyracksDataException {
             try {
                 int leafFrameTupleSize = leafFrame.getBytesRequiredToWriteTuple(tuple);
                 int interiorFrameTupleSize = interiorFrame.getBytesRequiredToWriteTuple(tuple);
                 int tupleSize = Math.max(leafFrameTupleSize, interiorFrameTupleSize);
                 if (tupleSize > maxTupleSize) {
-                    throw new TreeIndexException("Space required for record (" + tupleSize
-                            + ") larger than maximum acceptable size (" + maxTupleSize + ")");
+                    throw HyracksDataException.create(ErrorCode.RECORD_IS_TOO_LARGE, tupleSize, maxTupleSize);
                 }
 
                 NodeFrontier leafFrontier = nodeFrontiers.get(0);
@@ -942,8 +933,8 @@
                     }
 
                     pagesToWrite.clear();
-                    leafFrontier.page = bufferCache
-                            .confiscatePage(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId));
+                    leafFrontier.page =
+                            bufferCache.confiscatePage(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId));
                     leafFrame.setPage(leafFrontier.page);
                     leafFrame.initBuffer((byte) 0);
 
@@ -1035,9 +1026,9 @@
             ((RTreeNSMFrame) lowerFrame).adjustMBR();
 
             if (mbr == null) {
-                int bytesRequired = interiorFrameTupleWriter
-                        .bytesRequired(((RTreeNSMFrame) lowerFrame).getMBRTuples()[0], 0, cmp.getKeyFieldCount())
-                        + ((RTreeNSMInteriorFrame) interiorFrame).getChildPointerSize();
+                int bytesRequired =
+                        interiorFrameTupleWriter.bytesRequired(((RTreeNSMFrame) lowerFrame).getMBRTuples()[0], 0,
+                                cmp.getKeyFieldCount()) + ((RTreeNSMInteriorFrame) interiorFrame).getChildPointerSize();
                 mbr = ByteBuffer.allocate(bytesRequired);
             }
             interiorFrameTupleWriter.writeTupleFields(((RTreeNSMFrame) lowerFrame).getMBRTuples(), 0, mbr, 0);
@@ -1049,8 +1040,8 @@
             // load where finalization can possibly lead to a split
             //TODO: accomplish this without wasting 1 tuple
             int sizeOfTwoTuples = 2 * (mbrTuple.getTupleSize() + RTreeNSMInteriorFrame.childPtrSize);
-            FrameOpSpaceStatus spaceForTwoTuples = (((RTreeNSMInteriorFrame) interiorFrame)
-                    .hasSpaceInsert(sizeOfTwoTuples));
+            FrameOpSpaceStatus spaceForTwoTuples =
+                    (((RTreeNSMInteriorFrame) interiorFrame).hasSpaceInsert(sizeOfTwoTuples));
             if (spaceForTwoTuples != FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE && !toRoot) {
 
                 int finalPageId = freePageManager.takePage(metaFrame);
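
A hedged usage sketch for the relocated insert(): with the private RTree.insert gone, writes go through the accessor, and failures arrive as coded HyracksDataExceptions. NoOpOperationCallback stands in for whatever callbacks the caller actually uses (the tests in this change pass TestOperationCallback.INSTANCE for both arguments).

    IIndexAccessor accessor =
            rtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    try {
        accessor.insert(tuple);
    } catch (HyracksDataException e) {
        if (e.getErrorCode() != ErrorCode.RECORD_IS_TOO_LARGE) {
            throw e;
        }
        // tuple exceeds what a leaf or interior frame can hold; handle upstream
    }
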
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
index 8988605..38486cd 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
@@ -28,6 +28,7 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
@@ -47,9 +48,6 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.api.UnsortedInputException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
 import org.apache.hyracks.storage.am.common.impls.TreeIndexDiskOrderScanCursor;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.junit.Test;
@@ -61,7 +59,7 @@
 
     protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
             int[] bloomFilterKeyFields, ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories,
-            int[] btreeFields, int[] filterFields) throws TreeIndexException, HyracksDataException;
+            int[] btreeFields, int[] filterFields) throws HyracksDataException;
 
     /**
      * Fixed-Length Key,Value Example. Create a tree index with one fixed-length
@@ -80,8 +78,8 @@
         typeTraits[0] = IntegerPointable.TYPE_TRAITS;
         typeTraits[1] = IntegerPointable.TYPE_TRAITS;
         // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
+        ISerializerDeserializer[] fieldSerdes =
+                { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
 
         // Declare keys.
         int keyFieldCount = 1;
@@ -102,8 +100,8 @@
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
         int numInserts = 10000;
         for (int i = 0; i < numInserts; i++) {
             int f0 = rnd.nextInt() % numInserts;
@@ -116,7 +114,10 @@
             }
             try {
                 indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
+            } catch (HyracksDataException e) {
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
         }
         long end = System.currentTimeMillis();
@@ -164,8 +165,8 @@
         typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
         typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
         // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
-                new UTF8StringSerializerDeserializer() };
+        ISerializerDeserializer[] fieldSerdes =
+                { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
 
         // Declare keys.
         int keyFieldCount = 1;
@@ -182,8 +183,8 @@
 
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
 
         String key = "111";
         String data = "XXX";
@@ -263,8 +264,8 @@
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
         int numInserts = 10000;
         for (int i = 0; i < 10000; i++) {
             int f0 = rnd.nextInt() % 2000;
@@ -278,7 +279,10 @@
             }
             try {
                 indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
+            } catch (HyracksDataException e) {
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
         }
         long end = System.currentTimeMillis();
@@ -324,8 +328,8 @@
         typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
         typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
         // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
-                new UTF8StringSerializerDeserializer() };
+        ISerializerDeserializer[] fieldSerdes =
+                { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
 
         // Declare keys.
         int keyFieldCount = 1;
@@ -346,8 +350,8 @@
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
         // Max string length to be generated.
         int maxLength = 10;
         int numInserts = 10000;
@@ -362,7 +366,10 @@
             }
             try {
                 indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
+            } catch (HyracksDataException e) {
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
         }
         long end = System.currentTimeMillis();
@@ -408,8 +415,8 @@
         typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
         typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
         // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
-                new UTF8StringSerializerDeserializer() };
+        ISerializerDeserializer[] fieldSerdes =
+                { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
 
         // Declare keys.
         int keyFieldCount = 1;
@@ -426,8 +433,8 @@
 
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
         // Max string length to be generated.
         int runs = 3;
         for (int run = 0; run < runs; run++) {
@@ -455,7 +462,10 @@
                 try {
                     indexAccessor.insert(tuple);
                     insDone++;
-                } catch (TreeIndexException e) {
+                } catch (HyracksDataException e) {
+                    if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                        throw e;
+                    }
                 }
                 insDoneCmp[i] = insDone;
             }
@@ -474,7 +484,10 @@
                 try {
                     indexAccessor.delete(tuple);
                     delDone++;
-                } catch (TreeIndexException e) {
+                } catch (HyracksDataException e) {
+                    if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                        throw e;
+                    }
                 }
                 if (insDoneCmp[i] != delDone) {
                     if (LOGGER.isLoggable(Level.INFO)) {
@@ -514,8 +527,8 @@
         typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
         typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
         // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
-                new UTF8StringSerializerDeserializer() };
+        ISerializerDeserializer[] fieldSerdes =
+                { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
 
         // Declare keys.
         int keyFieldCount = 1;
@@ -533,8 +546,8 @@
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Inserting into tree...");
         }
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
         int maxLength = 10;
@@ -552,7 +565,10 @@
             }
             try {
                 indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
+            } catch (HyracksDataException e) {
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
         }
         // Print before doing any updates.
@@ -573,11 +589,7 @@
                         LOGGER.info("Updating " + i);
                     }
                 }
-                try {
-                    indexAccessor.update(tuple);
-                } catch (TreeIndexException e) {
-                } catch (UnsupportedOperationException e) {
-                }
+                indexAccessor.update(tuple);
             }
             // Do another scan after a round of updates.
             orderedScan(indexAccessor, fieldSerdes);
@@ -640,8 +652,8 @@
             LOGGER.info(ins + " tuples loaded in " + (end - start) + "ms");
         }
 
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
 
         // Build low key.
         ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
@@ -693,8 +705,8 @@
         int ins = 1000;
         for (int i = 1; i < ins; i++) {
 
-            ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields, null, null, null,
-                    null);
+            ITreeIndex treeIndex =
+                    createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields, null, null, null, null);
             treeIndex.create();
             treeIndex.activate();
 
@@ -718,19 +730,17 @@
                 TupleUtils.createIntegerTuple(tb, tuple, key, 5);
                 try {
                     bulkLoader.add(tuple);
-                } catch (UnsortedInputException e) {
-                    if (j != i) {
-                        fail("Unexpected exception: " + e.getMessage());
+                } catch (HyracksDataException e) {
+                    if (e.getErrorCode() == ErrorCode.UNSORTED_LOAD_INPUT || e.getErrorCode() == ErrorCode.DUPLICATE_KEY
+                            || e.getErrorCode() == ErrorCode.DUPLICATE_LOAD_INPUT) {
+                        if (j != i) {
+                            fail("Unexpected exception: " + e.getMessage());
+                        }
+                        // Success.
+                        break;
+                    } else {
+                        throw e;
                     }
-                    // Success.
-
-                    break;
-                } catch (TreeIndexDuplicateKeyException e2) {
-                    if (j != i) {
-                        fail("Unexpected exception: " + e2.getMessage());
-                    }
-                    // Success.
-                    break;
                 }
             }
             treeIndex.deactivate();
@@ -765,8 +775,8 @@
                 LOGGER.info("Disk-Order Scan:");
             }
             ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) indexAccessor;
-            TreeIndexDiskOrderScanCursor diskOrderCursor = (TreeIndexDiskOrderScanCursor) treeIndexAccessor
-                    .createDiskOrderScanCursor();
+            TreeIndexDiskOrderScanCursor diskOrderCursor =
+                    (TreeIndexDiskOrderScanCursor) treeIndexAccessor.createDiskOrderScanCursor();
             treeIndexAccessor.diskOrderScan(diskOrderCursor);
             try {
                 while (diskOrderCursor.hasNext()) {
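
The bulk-load contract the rewritten catch block verifies, as a minimal sketch built from the calls shown above (treeIndex, tb, tuple, and numTuples assumed in scope): input must arrive sorted and duplicate-free, or add() raises UNSORTED_LOAD_INPUT, DUPLICATE_KEY, or DUPLICATE_LOAD_INPUT instead of the old exception subclasses.

    IIndexBulkLoader bulkLoader = treeIndex.createBulkLoader(0.7f, false, numTuples, true);
    for (int i = 0; i < numTuples; i++) {
        TupleUtils.createIntegerTuple(tb, tuple, i, 5); // strictly ascending keys
        bulkLoader.add(tuple);
    }
    bulkLoader.end(); // finalizes the loaded pages
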
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
index b9c0286..ec56549 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
@@ -34,7 +34,6 @@
 import org.apache.hyracks.storage.am.common.IndexMultiThreadTestDriver;
 import org.apache.hyracks.storage.am.common.TestWorkloadConf;
 import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.junit.Test;
 
@@ -56,7 +55,7 @@
     protected abstract void tearDown() throws HyracksDataException;
 
     protected abstract IIndex createIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
-            int[] bloomFilterKeyFields) throws TreeIndexException, HyracksDataException;
+            int[] bloomFilterKeyFields) throws HyracksDataException;
 
     protected abstract IIndexTestWorkerFactory getWorkerFactory();
 
@@ -65,7 +64,7 @@
     protected abstract String getIndexTypeName();
 
     protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, int numThreads, TestWorkloadConf conf,
-            String dataMsg) throws InterruptedException, TreeIndexException, HyracksDataException {
+            String dataMsg) throws InterruptedException, HyracksDataException {
         setUp();
 
         if (LOGGER.isLoggable(Level.INFO)) {
@@ -88,8 +87,8 @@
 
         // 4 batches per thread.
         int batchSize = (NUM_OPERATIONS / numThreads) / 4;
-        IndexMultiThreadTestDriver driver = new IndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops,
-                conf.opProbs);
+        IndexMultiThreadTestDriver driver =
+                new IndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops, conf.opProbs);
         driver.init();
         long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
         index.validate();
@@ -103,7 +102,7 @@
     }
 
     @Test
-    public void oneIntKeyAndValue() throws InterruptedException, TreeIndexException, HyracksDataException {
+    public void oneIntKeyAndValue() throws InterruptedException, HyracksDataException {
         ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
                 IntegerSerializerDeserializer.INSTANCE };
         int numKeys = 1;
@@ -116,9 +115,9 @@
     }
 
     @Test
-    public void oneStringKeyAndValue() throws InterruptedException, TreeIndexException, HyracksDataException {
-        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
-                new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
+    public void oneStringKeyAndValue() throws InterruptedException, HyracksDataException {
+        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+                new UTF8StringSerializerDeserializer() };
         int numKeys = 1;
         String dataMsg = "One String Key And Value";
 
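
Condensed, the driver flow that runTest wires together, as a sketch with all names taken from the test above:

    // One driver run per workload configuration; validate the index afterwards.
    int batchSize = (NUM_OPERATIONS / numThreads) / 4; // 4 batches per thread
    IndexMultiThreadTestDriver driver =
            new IndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops, conf.opProbs);
    driver.init();
    long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
    index.validate();
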
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
index 2a16ebe36..970f49d 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
@@ -32,6 +32,7 @@
 import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
@@ -47,8 +48,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 
 @SuppressWarnings("rawtypes")
@@ -58,13 +57,13 @@
     private static void compareActualAndExpected(ITupleReference actual, CheckTuple expected,
             ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
         for (int i = 0; i < fieldSerdes.length; i++) {
-            ByteArrayInputStream inStream = new ByteArrayInputStream(actual.getFieldData(i), actual.getFieldStart(i),
-                    actual.getFieldLength(i));
+            ByteArrayInputStream inStream =
+                    new ByteArrayInputStream(actual.getFieldData(i), actual.getFieldStart(i), actual.getFieldLength(i));
             DataInput dataIn = new DataInputStream(inStream);
             Object actualObj = fieldSerdes[i].deserialize(dataIn);
             if (!actualObj.equals(expected.getField(i))) {
-                 fail("Actual and expected fields do not match on field " + i + ".\nExpected: " + expected.getField(i)
-                         + "\nActual  : " + actualObj);
+                fail("Actual and expected fields do not match on field " + i + ".\nExpected: " + expected.getField(i)
+                        + "\nActual  : " + actualObj);
             }
         }
     }
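
For context, compareActualAndExpected reads each field straight out of the tuple's backing byte array: the field's (data, start, length) triple is wrapped in a stream and handed to that field's serde. A standalone sketch of the idiom under the same Hyracks interfaces used above; the class and method names are illustrative only:

    import java.io.ByteArrayInputStream;
    import java.io.DataInput;
    import java.io.DataInputStream;

    import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;

    final class FieldDeserializationSketch {
        // Deserialize field i of a tuple without copying its bytes:
        // ByteArrayInputStream views the (data, start, length) range in place.
        @SuppressWarnings("rawtypes")
        static Object deserializeField(ITupleReference tuple, ISerializerDeserializer serde, int i)
                throws HyracksDataException {
            ByteArrayInputStream inStream =
                    new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
            DataInput dataIn = new DataInputStream(inStream);
            return serde.deserialize(dataIn); // the serde owns the wire format
        }
    }
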
@@ -81,11 +80,11 @@
         CheckTuple high = checkTuples.floor(highKey);
         if (low == null || high == null) {
             // Must be empty.
-            return new TreeSet<CheckTuple>();
+            return new TreeSet<>();
         }
         if (high.compareTo(low) < 0) {
             // Must be empty.
-            return new TreeSet<CheckTuple>();
+            return new TreeSet<>();
         }
         return checkTuples.subSet(low, true, high, true);
     }
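
The two early returns above are emptiness checks on the expected-result TreeSet: high is the largest stored tuple not above highKey, and low (computed just before this hunk, presumably via the symmetric ceiling(lowKey) lookup) is the smallest not below lowKey; if either does not exist, or they cross, the prefix range matches nothing. The same logic on plain integers:

    import java.util.TreeSet;

    public class RangeEmptinessDemo {
        public static void main(String[] args) {
            TreeSet<Integer> set = new TreeSet<>();
            set.add(10);
            set.add(20);
            set.add(30);
            Integer low = set.ceiling(25); // 30: smallest element >= 25
            Integer high = set.floor(27);  // 20: largest element <= 27
            // high < low, so the range [25, 27] selects nothing from the set,
            // mirroring the "Must be empty" early returns above.
            System.out.println(high.compareTo(low) < 0); // true
        }
    }
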
@@ -99,20 +98,20 @@
         MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), lowKey);
         MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), highKey);
         IIndexCursor searchCursor = ctx.getIndexAccessor().createSearchCursor(false);
-        RangePredicate rangePred = new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp,
-                highKeyCmp);
+        RangePredicate rangePred =
+                new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp, highKeyCmp);
         ctx.getIndexAccessor().search(searchCursor, rangePred);
         // Get the subset of elements from the expected set within the given
         // key range.
         CheckTuple lowKeyCheck = createCheckTupleFromTuple(lowKey, ctx.getFieldSerdes(), lowKeyCmp.getKeyFieldCount());
-        CheckTuple highKeyCheck = createCheckTupleFromTuple(highKey, ctx.getFieldSerdes(),
-                highKeyCmp.getKeyFieldCount());
+        CheckTuple highKeyCheck =
+                createCheckTupleFromTuple(highKey, ctx.getFieldSerdes(), highKeyCmp.getKeyFieldCount());
         SortedSet<CheckTuple> expectedSubset = null;
         if (lowKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()
                 || highKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()) {
             // Searching on a key prefix (low key or high key or both).
-            expectedSubset = getPrefixExpectedSubset((TreeSet<CheckTuple>) ctx.getCheckTuples(), lowKeyCheck,
-                    highKeyCheck);
+            expectedSubset =
+                    getPrefixExpectedSubset((TreeSet<CheckTuple>) ctx.getCheckTuples(), lowKeyCheck, highKeyCheck);
         } else {
             // Searching on all key fields.
             expectedSubset = ((TreeSet<CheckTuple>) ctx.getCheckTuples()).subSet(lowKeyCheck, lowKeyInclusive,
@@ -189,7 +188,7 @@
         int fieldCount = ctx.getFieldCount();
         int numKeyFields = ctx.getKeyFieldCount();
         int[] fieldValues = new int[ctx.getFieldCount()];
-        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
         Collection<CheckTuple> tmpCheckTuples = createCheckTuplesCollection();
         for (int i = 0; i < numTuples; i++) {
             // Set keys.
@@ -214,7 +213,7 @@
         int fieldCount = ctx.getFieldCount();
         int numKeyFields = ctx.getKeyFieldCount();
         String[] fieldValues = new String[fieldCount];
-        TreeSet<CheckTuple> tmpCheckTuples = new TreeSet<CheckTuple>();
+        TreeSet<CheckTuple> tmpCheckTuples = new TreeSet<>();
         for (int i = 0; i < numTuples; i++) {
             // Set keys.
             for (int j = 0; j < numKeyFields; j++) {
@@ -238,7 +237,7 @@
     }
 
     public static void insertCheckTuples(IIndexTestContext ctx, Collection<CheckTuple> checkTuples)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         int fieldCount = ctx.getFieldCount();
         int numTuples = checkTuples.size();
         ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
@@ -283,8 +282,11 @@
                 // Set expected values. Do this only after insertion succeeds
                 // because we ignore duplicate keys.
                 ctx.insertCheckTuple(createStringCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
-            } catch (TreeIndexDuplicateKeyException e) {
+            } catch (HyracksDataException e) {
                 // Ignore duplicate key insertions.
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
         }
     }
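
The hunk above is the recurring pattern of this change: the per-index checked exceptions (IndexException, TreeIndexException, TreeIndexDuplicateKeyException) are removed, and callers instead catch the generic HyracksDataException, inspect its error code, and rethrow anything that is not the condition they expect. A minimal sketch of the idiom, factored into a helper; the class, interface, and method names are hypothetical, while ErrorCode, HyracksDataException, and getErrorCode() are the real Hyracks types and calls seen in the hunk:

    import org.apache.hyracks.api.exceptions.ErrorCode;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public final class DuplicateKeySketch {

        @FunctionalInterface
        interface IndexAction {
            void run() throws HyracksDataException;
        }

        // Swallow only duplicate-key failures; anything else is a real error.
        static void runIgnoringDuplicates(IndexAction action) throws HyracksDataException {
            try {
                action.run();
            } catch (HyracksDataException e) {
                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                    throw e;
                }
                // Duplicate keys are expected in randomized test workloads.
            }
        }
    }

A caller could then wrap each insert, e.g. runIgnoringDuplicates(() -> ctx.getIndexAccessor().insert(ctx.getTuple())), instead of repeating the catch block at every site as the tests here do.
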
@@ -320,7 +322,7 @@
         int fieldCount = ctx.getFieldCount();
         int numKeyFields = ctx.getKeyFieldCount();
         String[] fieldValues = new String[fieldCount];
-        TreeSet<CheckTuple> tmpCheckTuples = new TreeSet<CheckTuple>();
+        TreeSet<CheckTuple> tmpCheckTuples = new TreeSet<>();
         for (int i = 0; i < numTuples; i++) {
             // Set keys.
             for (int j = 0; j < numKeyFields; j++) {
@@ -343,6 +345,7 @@
         }
     }
 
+    @Override
     public void upsertIntTuples(IIndexTestContext ictx, int numTuples, Random rnd) throws Exception {
         OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
         int fieldCount = ctx.getFieldCount();
@@ -351,7 +354,7 @@
         // Scale range of values according to number of keys.
         // For example, for 2 keys we want the square root of numTuples, for 3
         // keys the cube root of numTuples, etc.
-        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
         for (int i = 0; i < numTuples; i++) {
             // Set keys.
             setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
@@ -413,9 +416,9 @@
     }
 
     public CheckTuple createStringCheckTuple(String[] fieldValues, int numKeyFields) {
-        CheckTuple<String> checkTuple = new CheckTuple<String>(fieldValues.length, numKeyFields);
+        CheckTuple<String> checkTuple = new CheckTuple<>(fieldValues.length, numKeyFields);
         for (String s : fieldValues) {
-            checkTuple.appendField((String) s);
+            checkTuple.appendField(s);
         }
         return checkTuple;
     }
@@ -475,7 +478,7 @@
 
     @Override
     protected CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields) {
-        CheckTuple<Integer> checkTuple = new CheckTuple<Integer>(fieldValues.length, numKeyFields);
+        CheckTuple<Integer> checkTuple = new CheckTuple<>(fieldValues.length, numKeyFields);
         for (int v : fieldValues) {
             checkTuple.appendField(v);
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/AbstractIndexTestWorker.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/AbstractIndexTestWorker.java
index c1538f2..b6b19cf 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/AbstractIndexTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/AbstractIndexTestWorker.java
@@ -27,7 +27,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
 import org.apache.hyracks.storage.am.common.datagen.TupleBatch;
 
@@ -39,8 +38,8 @@
 
     protected final IIndexAccessor indexAccessor;
 
-    public AbstractIndexTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index, int numBatches)
-            throws HyracksDataException {
+    public AbstractIndexTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index,
+            int numBatches) throws HyracksDataException {
         this.dataGen = dataGen;
         this.opSelector = opSelector;
         this.numBatches = numBatches;
@@ -65,7 +64,7 @@
         }
     }
 
-    protected void consumeCursorTuples(IIndexCursor cursor) throws HyracksDataException, IndexException {
+    protected void consumeCursorTuples(IIndexCursor cursor) throws HyracksDataException {
         try {
             while (cursor.hasNext()) {
                 cursor.next();
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/ITreeIndexTestWorker.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/ITreeIndexTestWorker.java
index 144e9ea..ddbe73a 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/ITreeIndexTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/ITreeIndexTestWorker.java
@@ -22,8 +22,8 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 
+@FunctionalInterface
 public interface ITreeIndexTestWorker {
-    void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException;
+    void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException;
 }
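
Having a single abstract method, the interface now qualifies for @FunctionalInterface, so a worker can in principle be supplied as a lambda. A purely illustrative sketch; the body is a placeholder, not how the concrete test workers are implemented:

    import org.apache.hyracks.storage.am.common.ITreeIndexTestWorker;

    public class WorkerLambdaSketch {
        public static void main(String[] args) {
            // The lambda's shape matches performOp(ITupleReference, TestOperation).
            ITreeIndexTestWorker worker = (tuple, op) -> {
                // a real worker would dispatch on op and drive an index accessor
            };
            System.out.println(worker.getClass()); // a synthetic lambda class
        }
    }
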
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexMultiThreadTestDriver.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexMultiThreadTestDriver.java
index 32dd15d..a765637 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexMultiThreadTestDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexMultiThreadTestDriver.java
@@ -23,7 +23,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
 import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
 
 @SuppressWarnings("rawtypes")
@@ -49,8 +48,8 @@
         index.activate();
     }
 
-    public long[] run(int numThreads, int numRepeats, int numOps, int batchSize) throws InterruptedException,
-            TreeIndexException, HyracksDataException {
+    public long[] run(int numThreads, int numRepeats, int numOps, int batchSize)
+            throws InterruptedException, HyracksDataException {
         int numBatches = (batchSize < 1 ? numOps : numOps / batchSize);
         if (numBatches < numThreads) {
             numThreads = numBatches;
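
The arithmetic above keeps every worker busy: a batchSize below 1 degenerates to one operation per batch, and the thread count is clamped to the batch count so no thread is started without work. With hypothetical numbers:

    public class BatchMathDemo {
        public static void main(String[] args) {
            int numOps = 200;
            int batchSize = 50;
            int numThreads = 8;
            int numBatches = (batchSize < 1 ? numOps : numOps / batchSize); // 4
            if (numBatches < numThreads) {
                numThreads = numBatches; // clamp: only 4 workers get a batch
            }
            System.out.println(numThreads + " threads for " + numBatches + " batches");
        }
    }
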
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
index 94c0ab4..b801715 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
@@ -32,6 +32,7 @@
 import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
@@ -41,8 +42,6 @@
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 
 @SuppressWarnings("rawtypes")
 public abstract class TreeIndexTestUtils {
@@ -88,8 +87,8 @@
         CheckTuple checkTuple = createCheckTuple(fieldSerdes.length, numKeys);
         int fieldCount = Math.min(fieldSerdes.length, tuple.getFieldCount());
         for (int i = 0; i < fieldCount; i++) {
-            ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i));
+            ByteArrayInputStream inStream =
+                    new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
             DataInput dataIn = new DataInputStream(inStream);
             Comparable fieldObj = (Comparable) fieldSerdes[i].deserialize(dataIn);
             checkTuple.appendField(fieldObj);
@@ -122,8 +121,8 @@
                 while (diskOrderCursor.hasNext()) {
                     diskOrderCursor.next();
                     ITupleReference tuple = diskOrderCursor.getTuple();
-                    CheckTuple checkTuple = createCheckTupleFromTuple(tuple, ctx.getFieldSerdes(),
-                            ctx.getKeyFieldCount());
+                    CheckTuple checkTuple =
+                            createCheckTupleFromTuple(tuple, ctx.getFieldSerdes(), ctx.getKeyFieldCount());
                     if (!checkDiskOrderScanResult(tuple, checkTuple, ctx)) {
                         fail("Disk-order scan returned unexpected answer: " + checkTuple.toString());
                     }
@@ -140,9 +139,8 @@
             } finally {
                 try {
                     diskOrderCursor.close();
-                }
-                catch(Exception ex){
-                    LOGGER.log(Level.WARNING,"Error during scan cursor close",ex);
+                } catch (Exception ex) {
+                    LOGGER.log(Level.WARNING, "Error during scan cursor close", ex);
                 }
             }
         } catch (UnsupportedOperationException e) {
@@ -168,7 +166,7 @@
         // Scale range of values according to number of keys.
         // For example, for 2 keys we want the square root of numTuples, for 3
         // keys the cube root of numTuples, etc.
-        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
         for (int i = 0; i < numTuples; i++) {
             // Set keys.
             setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
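
The scaling comment above is worth unpacking: with k independently drawn key fields, each in [0, maxValue), there are maxValue^k possible key combinations, so choosing maxValue = ceil(numTuples^(1/k)) keeps the key space roughly the size of the data set and gives the randomized workload a steady supply of duplicate keys to exercise the handling in the next hunk. A quick check with hypothetical numbers, which also shows why the removed (double) cast was redundant: 1.0 / numKeyFields already divides in double.

    public class KeyScalingDemo {
        public static void main(String[] args) {
            int numTuples = 10000;
            int numKeyFields = 2;
            int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
            System.out.println(maxValue);                              // 100
            System.out.println((int) Math.pow(maxValue, numKeyFields)); // 10000 possible keys
        }
    }
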
@@ -183,9 +181,12 @@
             try {
                 ctx.getIndexAccessor().insert(ctx.getTuple());
                 ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
-            } catch (TreeIndexException e) {
+            } catch (HyracksDataException e) {
                 // We set expected values only after insertion succeeds because
                 // we ignore duplicate keys.
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
         }
     }
@@ -198,7 +199,7 @@
         // Scale range of values according to number of keys.
         // For example, for 2 keys we want the square root of numTuples, for 3
         // keys the cube root of numTuples, etc.
-        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
         for (int i = 0; i < numTuples; i++) {
             // Set keys.
             setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
@@ -213,9 +214,12 @@
             try {
                 ctx.getIndexAccessor().upsert(ctx.getTuple());
                 ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
-            } catch (TreeIndexException e) {
+            } catch (HyracksDataException e) {
                 // We set expected values only after insertion succeeds because
                 // we ignore duplicate keys.
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
         }
     }
@@ -225,7 +229,7 @@
         int fieldCount = ctx.getFieldCount();
         int numKeyFields = ctx.getKeyFieldCount();
         int[] fieldValues = new int[ctx.getFieldCount()];
-        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
         Collection<CheckTuple> tmpCheckTuples = createCheckTuplesCollection();
         for (int i = 0; i < numTuples; i++) {
             // Set keys.
@@ -246,7 +250,7 @@
     }
 
     public static void bulkLoadCheckTuples(IIndexTestContext ctx, Collection<CheckTuple> checkTuples)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         int fieldCount = ctx.getFieldCount();
         int numTuples = checkTuples.size();
         ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
@@ -257,7 +261,7 @@
         for (CheckTuple checkTuple : checkTuples) {
             if (LOGGER.isLoggable(Level.INFO)) {
                 //if (c % (numTuples / 10) == 0) {
-                    LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples);
+                LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples);
                 //}
             }
             createTupleFromCheckTuple(checkTuple, tupleBuilder, tuple, ctx.getFieldSerdes());
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
index ba55a1d..cc8445a 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
@@ -26,6 +26,7 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.DoublePointable;
@@ -45,7 +46,6 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.common.impls.TreeIndexDiskOrderScanCursor;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -69,10 +69,9 @@
 
     protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits,
             IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
-            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
-            int[] rtreeFields, int[] btreeFields, ITypeTraits[] filterTypeTraits,
-            IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws TreeIndexException,
-            HyracksDataException;
+            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType, int[] rtreeFields,
+            int[] btreeFields, ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories,
+            int[] filterFields) throws HyracksDataException;
 
     /**
      * Two Dimensions Example. Create an RTree index of two dimensions, where
@@ -96,10 +95,10 @@
         typeTraits[4] = IntegerPointable.TYPE_TRAITS;
         typeTraits[5] = IntegerPointable.TYPE_TRAITS;
         // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
+        ISerializerDeserializer[] fieldSerdes =
+                { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                        IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                        IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
 
         // Declare RTree keys.
         int rtreeKeyFieldCount = 4;
@@ -136,11 +135,11 @@
         }
 
         // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, IntegerPointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
 
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RTREE, null, btreeFields, null, null, null);
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+                RTreePolicyType.RTREE, null, btreeFields, null, null, null);
         treeIndex.create();
         treeIndex.activate();
 
@@ -150,8 +149,8 @@
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         int numInserts = 10000;
         for (int i = 0; i < numInserts; i++) {
             int p1x = rnd.nextInt();
@@ -166,7 +165,10 @@
                     Math.max(p1y, p2y), pk1, pk2);
             try {
                 indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
+            } catch (HyracksDataException e) {
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
         }
         long end = System.currentTimeMillis();
@@ -246,20 +248,20 @@
         }
 
         // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, IntegerPointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
 
         //2
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RTREE, null, btreeFields, null, null, null);
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+                RTreePolicyType.RTREE, null, btreeFields, null, null, null);
 
         treeIndex.create();
         treeIndex.activate();
 
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
 
         int p1x = rnd.nextInt();
         int p1y = rnd.nextInt();
@@ -386,19 +388,19 @@
         }
 
         // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, IntegerPointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
 
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RSTARTREE, null, btreeFields, null, null, null);
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+                RTreePolicyType.RSTARTREE, null, btreeFields, null, null, null);
 
         treeIndex.create();
         treeIndex.activate();
 
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
 
         int p1x = rnd.nextInt();
         int p1y = rnd.nextInt();
@@ -535,12 +537,12 @@
         }
 
         // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, DoublePointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, DoublePointable.FACTORY);
 
         //4
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RTREE, null, btreeFields, null, null, null);
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+                RTreePolicyType.RTREE, null, btreeFields, null, null, null);
         treeIndex.create();
         treeIndex.activate();
 
@@ -550,8 +552,8 @@
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         int numInserts = 10000;
         for (int i = 0; i < numInserts; i++) {
             double p1x = rnd.nextDouble();
@@ -567,7 +569,10 @@
                     Math.max(p1x, p2x), Math.max(p1y, p2y), Math.max(p1z, p2z), pk);
             try {
                 indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
+            } catch (HyracksDataException e) {
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
         }
         long end = System.currentTimeMillis();
@@ -643,18 +648,18 @@
         }
 
         // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, IntegerPointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
 
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RTREE, null, btreeFields, null, null, null);
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+                RTreePolicyType.RTREE, null, btreeFields, null, null, null);
         treeIndex.create();
         treeIndex.activate();
 
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
 
         int runs = 3;
         for (int run = 0; run < runs; run++) {
@@ -689,7 +694,10 @@
                         Math.max(p1y, p2y), pk);
                 try {
                     indexAccessor.insert(tuple);
-                } catch (TreeIndexException e) {
+                } catch (HyracksDataException e) {
+                    if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                        throw e;
+                    }
                 }
                 insDoneCmp[i] = insDone;
             }
@@ -703,7 +711,10 @@
                 try {
                     indexAccessor.delete(tuple);
                     delDone++;
-                } catch (TreeIndexException e) {
+                } catch (HyracksDataException e) {
+                    if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                        throw e;
+                    }
                 }
                 if (insDoneCmp[i] != delDone) {
                     if (LOGGER.isLoggable(Level.INFO)) {
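
The delete path uses the same shape as the insert idiom but filters on a different code: removing a key that is not in the index raises UPDATE_OR_DELETE_NON_EXISTENT_KEY, which this interleaved insert/delete test treats as a non-event. A hedged counterpart to the earlier sketch, names again hypothetical:

    import org.apache.hyracks.api.exceptions.ErrorCode;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public final class DeleteIdiomSketch {

        @FunctionalInterface
        interface IndexAction {
            void run() throws HyracksDataException;
        }

        // Swallow only "key not present"; rethrow every other failure.
        static void runIgnoringMissingKeys(IndexAction action) throws HyracksDataException {
            try {
                action.run();
            } catch (HyracksDataException e) {
                if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
                    throw e;
                }
            }
        }
    }
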
@@ -779,12 +790,12 @@
         }
 
         // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, IntegerPointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
 
         //6
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RTREE, null, btreeFields, null, null, null);
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+                RTreePolicyType.RTREE, null, btreeFields, null, null, null);
         treeIndex.create();
         treeIndex.activate();
 
@@ -817,8 +828,8 @@
             LOGGER.info(numInserts + " tuples loaded in " + (end - start) + "ms");
         }
 
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
 
         // Build key.
         ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
@@ -858,8 +869,8 @@
                 LOGGER.info("Disk-Order Scan:");
             }
             ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) indexAccessor;
-            TreeIndexDiskOrderScanCursor diskOrderCursor = (TreeIndexDiskOrderScanCursor) treeIndexAccessor
-                    .createDiskOrderScanCursor();
+            TreeIndexDiskOrderScanCursor diskOrderCursor =
+                    (TreeIndexDiskOrderScanCursor) treeIndexAccessor.createDiskOrderScanCursor();
             treeIndexAccessor.diskOrderScan(diskOrderCursor);
             try {
                 while (diskOrderCursor.hasNext()) {
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
index d6f07d9..53245ac 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
@@ -37,7 +37,6 @@
 import org.apache.hyracks.storage.am.common.TestWorkloadConf;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.rtree.AbstractRTreeExamplesTest.RTreeType;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -72,7 +71,7 @@
     protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits,
             IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
             IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType, int[] btreeFields)
-            throws TreeIndexException, HyracksDataException;
+            throws HyracksDataException;
 
     protected abstract IIndexTestWorkerFactory getWorkerFactory();
 
@@ -82,8 +81,7 @@
 
     protected void runTest(ISerializerDeserializer[] fieldSerdes,
             IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType,
-            int numThreads, TestWorkloadConf conf, String dataMsg) throws HyracksDataException, InterruptedException,
-            TreeIndexException {
+            int numThreads, TestWorkloadConf conf, String dataMsg) throws HyracksDataException, InterruptedException {
         setUp();
 
         if (LOGGER.isLoggable(Level.INFO)) {
@@ -116,8 +114,8 @@
         // 4 batches per thread.
         int batchSize = (NUM_OPERATIONS / numThreads) / 4;
 
-        IndexMultiThreadTestDriver driver = new IndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops,
-                conf.opProbs);
+        IndexMultiThreadTestDriver driver =
+                new IndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops, conf.opProbs);
         driver.init();
         long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
         driver.deinit();
@@ -130,14 +128,14 @@
     }
 
     @Test
-    public void rtreeTwoDimensionsInt() throws InterruptedException, HyracksDataException, TreeIndexException {
+    public void rtreeTwoDimensionsInt() throws InterruptedException, HyracksDataException {
         ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
                 IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
                 IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
 
         int numKeys = 4;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, IntegerPointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(numKeys, IntegerPointable.FACTORY);
 
         String dataMsg = "Two Dimensions Of Integer Values";
 
@@ -156,8 +154,8 @@
                 DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
 
         int numKeys = 4;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, DoublePointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
 
         String dataMsg = "Two Dimensions Of Double Values";
 
@@ -171,7 +169,7 @@
     }
 
     @Test
-    public void rtreeFourDimensionsDouble() throws InterruptedException, HyracksDataException, TreeIndexException {
+    public void rtreeFourDimensionsDouble() throws InterruptedException, HyracksDataException {
         ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
                 DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
                 DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
@@ -179,8 +177,8 @@
                 DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
 
         int numKeys = 8;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, DoublePointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
 
         String dataMsg = "Four Dimensions Of Double Values";
 
@@ -193,7 +191,7 @@
     }
 
     @Test
-    public void rstartreeTwoDimensionsInt() throws InterruptedException, HyracksDataException, TreeIndexException {
+    public void rstartreeTwoDimensionsInt() throws InterruptedException, HyracksDataException {
         if (!testRstarPolicy) {
             if (LOGGER.isLoggable(Level.INFO)) {
                 LOGGER.info("Ignoring RTree Multithread Test With Two Dimensions With Integer Keys.");
@@ -206,8 +204,8 @@
                 IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
 
         int numKeys = 4;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, IntegerPointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(numKeys, IntegerPointable.FACTORY);
 
         String dataMsg = "Two Dimensions Of Integer Values";
 
@@ -233,8 +231,8 @@
                 DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
 
         int numKeys = 4;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, DoublePointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
 
         String dataMsg = "Two Dimensions Of Double Values";
 
@@ -248,7 +246,7 @@
     }
 
     @Test
-    public void rstartreeFourDimensionsDouble() throws InterruptedException, HyracksDataException, TreeIndexException {
+    public void rstartreeFourDimensionsDouble() throws InterruptedException, HyracksDataException {
         if (!testRstarPolicy) {
             if (LOGGER.isLoggable(Level.INFO)) {
                 LOGGER.info("Ignoring RTree Multithread Test With Four Dimensions With Double Keys.");
@@ -263,8 +261,8 @@
                 DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
 
         int numKeys = 8;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                numKeys, DoublePointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
 
         String dataMsg = "Four Dimensions Of Double Values";
 
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
index 13a4fcb..301b448 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
@@ -28,6 +28,7 @@
 import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -37,7 +38,6 @@
 import org.apache.hyracks.storage.am.common.TreeIndexTestUtils;
 import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.common.util.HashMultiSet;
 import org.apache.hyracks.storage.am.rtree.impls.SearchPredicate;
@@ -53,7 +53,7 @@
     // Create a new HashMultiSet containing the elements satisfying the search key
     public HashMultiSet<RTreeCheckTuple> getRangeSearchExpectedResults(Collection<RTreeCheckTuple> checkTuples,
             RTreeCheckTuple key) {
-        HashMultiSet<RTreeCheckTuple> expectedResult = new HashMultiSet<RTreeCheckTuple>();
+        HashMultiSet<RTreeCheckTuple> expectedResult = new HashMultiSet<>();
         Iterator<RTreeCheckTuple> iter = checkTuples.iterator();
         while (iter.hasNext()) {
             RTreeCheckTuple t = iter.next();
@@ -77,8 +77,8 @@
 
         // Get the subset of elements from the expected set within the given
         // key range.
-        RTreeCheckTuple keyCheck = (RTreeCheckTuple) createCheckTupleFromTuple(key, ctx.getFieldSerdes(),
-                cmp.getKeyFieldCount());
+        RTreeCheckTuple keyCheck =
+                (RTreeCheckTuple) createCheckTupleFromTuple(key, ctx.getFieldSerdes(), cmp.getKeyFieldCount());
 
         HashMultiSet<RTreeCheckTuple> expectedResult = null;
 
@@ -94,7 +94,7 @@
         // Scale range of values according to number of keys.
         // For example, for 2 keys we want the square root of numTuples, for 3
         // keys the cube root of numTuples, etc.
-        double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
         for (int i = 0; i < numTuples; i++) {
             // Set keys.
             setDoubleKeyFields(fieldValues, numKeyFields, maxValue, rnd);
@@ -109,10 +109,12 @@
             try {
                 ctx.getIndexAccessor().insert(ctx.getTuple());
                 ctx.insertCheckTuple(createDoubleCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
-            } catch (TreeIndexException e) {
+            } catch (HyracksDataException e) {
                 // We set expected values only after insertion succeeds because
-                // we
-                // ignore duplicate keys.
+                // we ignore duplicate keys.
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
         }
     }
@@ -139,7 +141,7 @@
 
     @SuppressWarnings("unchecked")
     protected CheckTuple createDoubleCheckTuple(double[] fieldValues, int numKeyFields) {
-        RTreeCheckTuple<Double> checkTuple = new RTreeCheckTuple<Double>(fieldValues.length, numKeyFields);
+        RTreeCheckTuple<Double> checkTuple = new RTreeCheckTuple<>(fieldValues.length, numKeyFields);
         for (double v : fieldValues) {
             checkTuple.appendField(v);
         }
@@ -151,7 +153,7 @@
         int fieldCount = ctx.getFieldCount();
         int numKeyFields = ctx.getKeyFieldCount();
         double[] fieldValues = new double[ctx.getFieldCount()];
-        double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+        double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
         Collection<CheckTuple> tmpCheckTuples = createCheckTuplesCollection();
         for (int i = 0; i < numTuples; i++) {
             // Set keys.
@@ -178,8 +180,8 @@
             while (cursor.hasNext()) {
                 cursor.next();
                 ITupleReference tuple = cursor.getTuple();
-                RTreeCheckTuple checkTuple = (RTreeCheckTuple) createCheckTupleFromTuple(tuple, fieldSerdes,
-                        keyFieldCount);
+                RTreeCheckTuple checkTuple =
+                        (RTreeCheckTuple) createCheckTupleFromTuple(tuple, fieldSerdes, keyFieldCount);
                 if (!checkTuples.contains(checkTuple)) {
                     fail("Scan or range search returned unexpected answer: " + checkTuple.toString());
                 }
@@ -211,7 +213,7 @@
     @SuppressWarnings("unchecked")
     @Override
     protected CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields) {
-        RTreeCheckTuple<Integer> checkTuple = new RTreeCheckTuple<Integer>(fieldValues.length, numKeyFields);
+        RTreeCheckTuple<Integer> checkTuple = new RTreeCheckTuple<>(fieldValues.length, numKeyFields);
         for (int v : fieldValues) {
             checkTuple.appendField(v);
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java
index 3f71dd9..f44dee2 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java
@@ -28,12 +28,6 @@
 import java.util.TreeSet;
 import java.util.logging.Level;
 
-import org.apache.hyracks.storage.am.common.api.*;
-import org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
 import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
@@ -47,7 +41,6 @@
 import org.apache.hyracks.dataflow.common.utils.TupleUtils;
 import org.apache.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
 import org.apache.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
 import org.apache.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import org.apache.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import org.apache.hyracks.storage.am.btree.impls.BTree;
@@ -56,10 +49,18 @@
 import org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest;
 import org.apache.hyracks.storage.am.common.TestOperationCallback;
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManager;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrameFactory;
 import org.apache.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
 
 public class BTreeSearchCursorTest extends AbstractBTreeTest {
     private final int fieldCount = 2;
@@ -68,6 +69,7 @@
     private final ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
     private final Random rnd = new Random(50);
 
+    @Override
     @Before
     public void setUp() throws HyracksDataException {
         super.setUp();
@@ -104,14 +106,14 @@
         ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
 
-        ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        ITreeIndexAccessor indexAccessor =
+                btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
 
         // generate keys
         int numKeys = 50;
         int maxKey = 1000;
-        TreeSet<Integer> uniqueKeys = new TreeSet<Integer>();
-        ArrayList<Integer> keys = new ArrayList<Integer>();
+        TreeSet<Integer> uniqueKeys = new TreeSet<>();
+        ArrayList<Integer> keys = new ArrayList<>();
         while (uniqueKeys.size() < numKeys) {
             int key = rnd.nextInt() % maxKey;
             uniqueKeys.add(key);
@@ -128,7 +130,6 @@
 
             try {
                 indexAccessor.insert(tuple);
-            } catch (BTreeException e) {
             } catch (Exception e) {
                 e.printStackTrace();
             }
@@ -138,14 +139,14 @@
         int maxSearchKey = 100;
 
         // forward searches
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                true, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false,
-                true, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                false, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                true, false));
+        Assert.assertTrue(
+                performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false));
+        Assert.assertTrue(
+                performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false));
+        Assert.assertTrue(
+                performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, false));
+        Assert.assertTrue(
+                performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false));
 
         btree.deactivate();
         btree.destroy();
@@ -181,13 +182,13 @@
         ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
 
-        ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        ITreeIndexAccessor indexAccessor =
+                btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
 
         // generate keys
         int numKeys = 50;
         int maxKey = 10;
-        ArrayList<Integer> keys = new ArrayList<Integer>();
+        ArrayList<Integer> keys = new ArrayList<>();
         for (int i = 0; i < numKeys; i++) {
             int k = rnd.nextInt() % maxKey;
             keys.add(k);
@@ -202,7 +203,6 @@
 
             try {
                 indexAccessor.insert(tuple);
-            } catch (BTreeException e) {
             } catch (Exception e) {
                 e.printStackTrace();
             }
@@ -212,14 +212,14 @@
         int maxSearchKey = 100;
 
         // forward searches
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                true, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false,
-                true, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                false, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                true, false));
+        Assert.assertTrue(
+                performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false));
+        Assert.assertTrue(
+                performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false));
+        Assert.assertTrue(
+                performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, false));
+        Assert.assertTrue(
+                performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false));
 
         btree.deactivate();
         btree.destroy();
@@ -255,13 +255,13 @@
         ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
 
-        ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        ITreeIndexAccessor indexAccessor =
+                btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
 
         // generate keys
         int numKeys = 50;
         int maxKey = 10;
-        ArrayList<Integer> keys = new ArrayList<Integer>();
+        ArrayList<Integer> keys = new ArrayList<>();
         for (int i = 0; i < numKeys; i++) {
             int k = rnd.nextInt() % maxKey;
             keys.add(k);
@@ -276,7 +276,6 @@
 
             try {
                 indexAccessor.insert(tuple);
-            } catch (BTreeException e) {
             } catch (Exception e) {
                 e.printStackTrace();
             }
@@ -286,14 +285,14 @@
         int maxSearchKey = 100;
 
         // forward searches
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                true, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false,
-                true, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                false, false));
-        Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
-                true, false));
+        Assert.assertTrue(
+                performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false));
+        Assert.assertTrue(
+                performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false));
+        Assert.assertTrue(
+                performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, false));
+        Assert.assertTrue(
+                performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false));
 
         btree.deactivate();
         btree.destroy();
@@ -310,8 +309,8 @@
         searchCmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
         MultiComparator searchCmp = new MultiComparator(searchCmps);
 
-        RangePredicate rangePred = new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, searchCmp,
-                searchCmp);
+        RangePredicate rangePred =
+                new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, searchCmp, searchCmp);
         return rangePred;
     }
 
@@ -319,10 +318,12 @@
             boolean lowKeyInclusive, boolean highKeyInclusive) {
 
         // special cases
-        if (lk == hk && (!lowKeyInclusive || !highKeyInclusive))
+        if (lk == hk && (!lowKeyInclusive || !highKeyInclusive)) {
             return;
-        if (lk > hk)
+        }
+        if (lk > hk) {
             return;
+        }
 
         for (int i = 0; i < keys.size(); i++) {
             if ((lk == keys.get(i) && lowKeyInclusive) || (hk == keys.get(i) && highKeyInclusive)) {
@@ -341,8 +342,8 @@
             IBTreeInteriorFrame interiorFrame, int minKey, int maxKey, boolean lowKeyInclusive,
             boolean highKeyInclusive, boolean printExpectedResults) throws Exception {
 
-        ArrayList<Integer> results = new ArrayList<Integer>();
-        ArrayList<Integer> expectedResults = new ArrayList<Integer>();
+        ArrayList<Integer> results = new ArrayList<>();
+        ArrayList<Integer> expectedResults = new ArrayList<>();
 
         for (int i = minKey; i < maxKey; i++) {
             for (int j = minKey; j < maxKey; j++) {
@@ -355,8 +356,8 @@
 
                 ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame, false);
                 RangePredicate rangePred = createRangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive);
-                ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
-                        TestOperationCallback.INSTANCE);
+                ITreeIndexAccessor indexAccessor =
+                        btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
                 indexAccessor.search(rangeCursor, rangePred);
 
                 try {
@@ -381,15 +382,17 @@
                     if (expectedResults.size() > 0) {
                         char l, u;
 
-                        if (lowKeyInclusive)
+                        if (lowKeyInclusive) {
                             l = '[';
-                        else
+                        } else {
                             l = '(';
+                        }
 
-                        if (highKeyInclusive)
+                        if (highKeyInclusive) {
                             u = ']';
-                        else
+                        } else {
                             u = ')';
+                        }
 
                         if (LOGGER.isLoggable(Level.INFO)) {
                             LOGGER.info("RANGE: " + l + " " + lowKey + " , " + highKey + " " + u);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java
index e4c1449..ee7e968 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java
@@ -22,9 +22,6 @@
 import java.util.Random;
 import java.util.logging.Level;
 
-import org.apache.hyracks.storage.am.common.api.*;
-import org.junit.Test;
-
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.IFrameTupleAccessor;
 import org.apache.hyracks.api.comm.VSizeFrame;
@@ -33,6 +30,8 @@
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.ErrorCode;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -48,6 +47,10 @@
 import org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest;
 import org.apache.hyracks.storage.am.common.TestOperationCallback;
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManager;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrame;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrameFactory;
 import org.apache.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
 import org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager;
 import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
@@ -58,6 +61,7 @@
 import org.apache.hyracks.storage.common.file.IFileMapProvider;
 import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
 import org.apache.hyracks.test.support.TestUtils;
+import org.junit.Test;
 
 @SuppressWarnings("rawtypes")
 public class BTreeStatsTest extends AbstractBTreeTest {
@@ -97,8 +101,8 @@
 
         IMetadataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
 
-        BTree btree = new BTree(bufferCache, fmp, freePageManager, interiorFrameFactory, leafFrameFactory,
-                cmpFactories, fieldCount, harness.getFileReference());
+        BTree btree = new BTree(bufferCache, fmp, freePageManager, interiorFrameFactory, leafFrameFactory, cmpFactories,
+                fieldCount, harness.getFileReference());
         btree.create();
         btree.activate();
 
@@ -116,15 +120,15 @@
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
-        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
+        ISerializerDeserializer[] recDescSers =
+                { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
         RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
         IFrameTupleAccessor accessor = new FrameTupleAccessor(recDesc);
         accessor.reset(frame.getBuffer());
         FrameTupleReference tuple = new FrameTupleReference();
 
-        ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        ITreeIndexAccessor indexAccessor =
+                btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
         // 10000
         for (int i = 0; i < 100000; i++) {
 
@@ -151,22 +155,24 @@
 
             try {
                 indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
-            } catch (Exception e) {
-                e.printStackTrace();
+            } catch (HyracksDataException e) {
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    e.printStackTrace();
+                    throw e;
+                }
             }
         }
 
         int fileId = fmp.lookupFileId(harness.getFileReference());
-        TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
-                btree.getRootPageId());
+        TreeIndexStatsGatherer statsGatherer =
+                new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId, btree.getRootPageId());
         TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("\n" + stats.toString());
         }
 
-        TreeIndexBufferCacheWarmup bufferCacheWarmup = new TreeIndexBufferCacheWarmup(bufferCache, freePageManager,
-                fileId);
+        TreeIndexBufferCacheWarmup bufferCacheWarmup =
+                new TreeIndexBufferCacheWarmup(bufferCache, freePageManager, fileId);
         bufferCacheWarmup.warmup(leafFrame, metaFrame, new int[] { 1, 2 }, new int[] { 2, 5 });
 
         btree.deactivate();
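
This hunk is the template for the exception-model change that repeats through the rest of the patch: the removed checked types (BTreeException, TreeIndexException, IndexException) give way to HyracksDataException plus an error-code test. Distilled into a self-contained helper, it looks like the sketch below (the wrapper class and method are illustrative, not code from the patch; the imports all appear in the hunks above):

import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;

class DuplicateTolerantInsert {
    // Tolerate only the expected duplicate-key failure; propagate anything else
    // instead of printing the stack trace and continuing.
    static void insertIgnoringDuplicates(ITreeIndexAccessor accessor, ITupleReference tuple)
            throws HyracksDataException {
        try {
            accessor.insert(tuple);
        } catch (HyracksDataException e) {
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                throw e;
            }
        }
    }
}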
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
index 8fc5a09..78023a2 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
@@ -21,13 +21,11 @@
 import java.util.Random;
 import java.util.logging.Level;
 
-import org.apache.hyracks.storage.am.common.api.*;
-import org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager;
-import org.junit.Test;
-
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -44,9 +42,15 @@
 import org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest;
 import org.apache.hyracks.storage.am.common.TestOperationCallback;
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManager;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrameFactory;
 import org.apache.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager;
 import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
+import org.junit.Test;
 
 public class BTreeUpdateSearchTest extends AbstractBTreeTest {
 
@@ -67,8 +71,8 @@
         cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
 
         @SuppressWarnings("rawtypes")
-        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
+        ISerializerDeserializer[] recDescSers =
+                { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
 
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
         ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
@@ -94,8 +98,8 @@
 
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference insertTuple = new ArrayTupleReference();
-        ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        ITreeIndexAccessor indexAccessor =
+                btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
 
         int numInserts = 10000;
         for (int i = 0; i < numInserts; i++) {
@@ -111,9 +115,11 @@
 
             try {
                 indexAccessor.insert(insertTuple);
-            } catch (TreeIndexException e) {
-            } catch (Exception e) {
-                e.printStackTrace();
+            } catch (HyracksDataException hde) {
+                if (hde.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    hde.printStackTrace();
+                    throw hde;
+                }
             }
         }
         long end = System.currentTimeMillis();
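
The insert loop above is bracketed by System.currentTimeMillis() calls; assuming a matching start timestamp is taken before the loop (the variable start is not shown in this hunk), a throughput readout would be a one-liner like:

// Illustrative only: derive inserts/second from the test's timing scaffold.
double insertsPerSec = (end == start) ? Double.POSITIVE_INFINITY : numInserts * 1000.0 / (end - start);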
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java
index 4420fb0..d284193 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java
@@ -23,9 +23,6 @@
 import java.util.Random;
 import java.util.logging.Level;
 
-import org.junit.Assert;
-import org.junit.Test;
-
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.IFrameTupleAccessor;
 import org.apache.hyracks.api.comm.VSizeFrame;
@@ -43,7 +40,6 @@
 import org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
 import org.apache.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
 import org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
@@ -54,6 +50,8 @@
 import org.apache.hyracks.storage.common.buffercache.ICachedPage;
 import org.apache.hyracks.storage.common.file.BufferedFileHandle;
 import org.apache.hyracks.storage.common.file.IFileMapProvider;
+import org.junit.Assert;
+import org.junit.Test;
 
 public class FieldPrefixNSMTest extends AbstractBTreeTest {
 
@@ -172,8 +170,6 @@
                 try {
                     int targetTupleIndex = frame.findInsertTupleIndex(tuple);
                     frame.insert(tuple, targetTupleIndex);
-                } catch (BTreeException e) {
-                    e.printStackTrace();
                 } catch (Exception e) {
                     e.printStackTrace();
                 }
@@ -206,8 +202,8 @@
                     }
                 }
 
-                ITupleReference tuple = createTuple(ctx, savedFields[i][0], savedFields[i][1], savedFields[i][2],
-                        false);
+                ITupleReference tuple =
+                        createTuple(ctx, savedFields[i][0], savedFields[i][1], savedFields[i][2], false);
                 try {
                     int tupleIndex = frame.findDeleteTupleIndex(tuple);
                     frame.delete(tuple, tupleIndex);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/multithread/BTreeTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
index 77ad13c..3734f08 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
@@ -19,11 +19,11 @@
 
 package org.apache.hyracks.storage.am.btree.multithread;
 
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
 import org.apache.hyracks.storage.am.btree.impls.BTree;
 import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
 import org.apache.hyracks.storage.am.common.AbstractIndexTestWorker;
@@ -31,10 +31,7 @@
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 
 public class BTreeTestWorker extends AbstractIndexTestWorker {
@@ -53,7 +50,7 @@
     }
 
     @Override
-    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
         BTree.BTreeAccessor accessor = (BTree.BTreeAccessor) indexAccessor;
         ITreeIndexCursor searchCursor = accessor.createSearchCursor(false);
         ITreeIndexCursor diskOrderScanCursor = accessor.createDiskOrderScanCursor();
@@ -64,8 +61,11 @@
             case INSERT:
                 try {
                     accessor.insert(tuple);
-                } catch (TreeIndexDuplicateKeyException e) {
-                    // Ignore duplicate keys, since we get random tuples.
+                } catch (HyracksDataException e) {
+                    // Ignore duplicate keys, since we get random tuples; rethrow anything else.
+                    if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                        throw e;
+                    }
                 }
                 break;
 
@@ -78,18 +78,25 @@
                 deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
                 try {
                     accessor.delete(deleteTuple);
-                } catch (TreeIndexNonExistentKeyException e) {
-                    // Ignore non-existant keys, since we get random tuples.
+                } catch (HyracksDataException e) {
+                    // Ignore non-existent keys, since we get random tuples; rethrow anything else.
+                    if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                        throw e;
+                    }
                 }
                 break;
 
             case UPDATE:
                 try {
                     accessor.update(tuple);
-                } catch (TreeIndexNonExistentKeyException e) {
+                } catch (HyracksDataException e) {
                     // Ignore non-existant keys, since we get random tuples.
-                } catch (BTreeNotUpdateableException e) {
-                    // Ignore not updateable exception due to numKeys == numFields.
+                    // Also ignore not-updateable errors due to numKeys == numFields;
+                    // anything else is unexpected, so rethrow.
+                    if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY
+                            && e.getErrorCode() != ErrorCode.INDEX_NOT_UPDATABLE) {
+                        throw e;
+                    }
                 }
                 break;
 
@@ -126,7 +133,7 @@
         }
     }
 
-    private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException, IndexException {
+    private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException {
         try {
             while (cursor.hasNext()) {
                 cursor.next();
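
BTreeTestWorker now repeats the same filter-and-rethrow shape for INSERT, DELETE, and UPDATE. A possible consolidation, not part of this patch (ignoreUnlessExpected and its class are hypothetical names; ErrorCode constants are plain ints, as the comparisons above rely on):

import org.apache.hyracks.api.exceptions.HyracksDataException;

class ExpectedErrorFilter {
    // Swallows e if its error code is one of the expected, ignorable codes;
    // rethrows it otherwise.
    static void ignoreUnlessExpected(HyracksDataException e, int... expectedCodes)
            throws HyracksDataException {
        for (int code : expectedCodes) {
            if (e.getErrorCode() == code) {
                return; // expected for randomly generated tuples
            }
        }
        throw e;
    }
}

// Usage in performOp(), e.g. for UPDATE:
//     } catch (HyracksDataException e) {
//         ExpectedErrorFilter.ignoreUnlessExpected(e,
//                 ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY, ErrorCode.INDEX_NOT_UPDATABLE);
//     }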
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
index 0cdcf48..4b28e25 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
@@ -35,7 +35,6 @@
 import org.apache.hyracks.storage.am.common.TestOperationCallback;
 import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
 import org.apache.hyracks.storage.am.lsm.btree.utils.LSMBTreeUtil;
 import org.junit.After;
@@ -48,14 +47,13 @@
     @Override
     protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
             int[] bloomFilterKeyFields, ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories,
-            int[] btreeFields, int[] filterFields) throws TreeIndexException, HyracksDataException {
-        return LSMBTreeUtil.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(), harness
-                .getFileReference(),
-                harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits, cmpFactories,
-                bloomFilterKeyFields, harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
+            int[] btreeFields, int[] filterFields) throws HyracksDataException {
+        return LSMBTreeUtil.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(),
+                harness.getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
+                cmpFactories, bloomFilterKeyFields, harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
                 harness.getOperationTracker(), harness.getIOScheduler(), harness.getIOOperationCallback(), true,
-                filterTypeTraits, filterCmpFactories, btreeFields, filterFields, true, harness
-                        .getMetadataPageManagerFactory());
+                filterTypeTraits, filterCmpFactories, btreeFields, filterFields, true,
+                harness.getMetadataPageManagerFactory());
     }
 
     @Before
@@ -83,8 +81,8 @@
         typeTraits[0] = IntegerPointable.TYPE_TRAITS;
         typeTraits[1] = IntegerPointable.TYPE_TRAITS;
         // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
+        ISerializerDeserializer[] fieldSerdes =
+                { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
 
         // Declare keys.
         int keyFieldCount = 1;
@@ -96,8 +94,8 @@
         bloomFilterKeyFields[0] = 0;
 
         ITypeTraits[] filterTypeTraits = { IntegerPointable.TYPE_TRAITS };
-        IBinaryComparatorFactory[] filterCmpFactories = { PointableBinaryComparatorFactory.of(
-                IntegerPointable.FACTORY) };
+        IBinaryComparatorFactory[] filterCmpFactories =
+                { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
         int[] filterFields = { 1 };
         int[] btreeFields = { 1 };
         ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields, filterTypeTraits,
@@ -111,8 +109,8 @@
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
-                TestOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
         int numInserts = 10000;
         for (int i = 0; i < numInserts; i++) {
             int f0 = rnd.nextInt() % numInserts;
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
index cfd2196..9abc321 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
@@ -29,7 +29,6 @@
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
 import org.apache.hyracks.storage.am.common.TestWorkloadConf;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;
 import org.apache.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
 import org.apache.hyracks.storage.am.lsm.btree.utils.LSMBTreeUtil;
@@ -52,9 +51,9 @@
 
     @Override
     protected ITreeIndex createIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
-            int[] bloomFilterKeyFields) throws TreeIndexException, HyracksDataException {
-        return LSMBTreeUtil.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(), harness
-                .getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
+            int[] bloomFilterKeyFields) throws HyracksDataException {
+        return LSMBTreeUtil.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(),
+                harness.getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
                 cmpFactories, bloomFilterKeyFields, harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
                 harness.getOperationTracker(), harness.getIOScheduler(), harness.getIOOperationCallback(), true, null,
                 null, null, null, true, harness.getMetadataPageManagerFactory());
@@ -71,41 +70,41 @@
 
         // Insert only workload.
         TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
-        workloadConfs.add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertOnlyOps.length)));
+        workloadConfs
+                .add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper.getUniformProbDist(insertOnlyOps.length)));
 
         // Insert and merge workload.
         TestOperation[] insertMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(insertMergeOps, ProbabilityHelper
-                .getUniformProbDist(insertMergeOps.length)));
+        workloadConfs
+                .add(new TestWorkloadConf(insertMergeOps, ProbabilityHelper.getUniformProbDist(insertMergeOps.length)));
 
         // Inserts mixed with point searches and scans.
-        TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH,
-                TestOperation.SCAN };
-        workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertSearchOnlyOps.length)));
+        TestOperation[] insertSearchOnlyOps =
+                new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH, TestOperation.SCAN };
+        workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps,
+                ProbabilityHelper.getUniformProbDist(insertSearchOnlyOps.length)));
 
         // Inserts, updates, and deletes.
-        TestOperation[] insertDeleteUpdateOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.UPDATE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteUpdateOps.length)));
+        TestOperation[] insertDeleteUpdateOps =
+                new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE };
+        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateOps,
+                ProbabilityHelper.getUniformProbDist(insertDeleteUpdateOps.length)));
 
         // Inserts, updates, deletes and merges.
         TestOperation[] insertDeleteUpdateMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
                 TestOperation.UPDATE, TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateMergeOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteUpdateMergeOps.length)));
+        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateMergeOps,
+                ProbabilityHelper.getUniformProbDist(insertDeleteUpdateMergeOps.length)));
 
         // All operations except merge.
         TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
                 TestOperation.UPDATE, TestOperation.POINT_SEARCH, TestOperation.SCAN };
-        workloadConfs.add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper
-                .getUniformProbDist(allNoMergeOps.length)));
+        workloadConfs
+                .add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper.getUniformProbDist(allNoMergeOps.length)));
 
         // All operations.
-        TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.UPDATE, TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.MERGE };
+        TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE,
+                TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.MERGE };
         workloadConfs.add(new TestWorkloadConf(allOps, ProbabilityHelper.getUniformProbDist(allOps.length)));
 
         return workloadConfs;
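
Every workload above pairs an operation array with ProbabilityHelper.getUniformProbDist(ops.length). Assuming that helper returns one equal weight per operation (an assumption; its implementation is not part of this patch), a minimal equivalent is:

import java.util.Arrays;

class UniformDistSketch {
    // n equal selection probabilities summing to 1.
    static double[] uniformProbDist(int n) {
        double[] probs = new double[n];
        Arrays.fill(probs, 1.0 / n);
        return probs;
    }
}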
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java
index 56f40b0..e849828 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java
@@ -19,21 +19,18 @@
 
 package org.apache.hyracks.storage.am.lsm.btree.multithread;
 
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
 import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
 import org.apache.hyracks.storage.am.common.AbstractIndexTestWorker;
 import org.apache.hyracks.storage.am.common.TestOperationSelector;
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.btree.impls.LSMBTree;
 import org.apache.hyracks.storage.am.lsm.btree.impls.LSMBTree.LSMBTreeAccessor;
@@ -54,7 +51,7 @@
     }
 
     @Override
-    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
         LSMBTreeAccessor accessor = (LSMBTreeAccessor) indexAccessor;
         IIndexCursor searchCursor = accessor.createSearchCursor(false);
         MultiComparator cmp = accessor.getMultiComparator();
@@ -64,8 +61,11 @@
             case INSERT:
                 try {
                     accessor.insert(tuple);
-                } catch (TreeIndexDuplicateKeyException e) {
-                    // Ignore duplicate keys, since we get random tuples.
+                } catch (HyracksDataException e) {
+                    // Ignore duplicate keys, since we get random tuples; rethrow anything else.
+                    if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                        throw e;
+                    }
                 }
                 break;
 
@@ -78,18 +78,24 @@
                 deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
                 try {
                     accessor.delete(deleteTuple);
-                } catch (TreeIndexNonExistentKeyException e) {
+                } catch (HyracksDataException e) {
                     // Ignore non-existant keys, since we get random tuples.
+                    if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+                        throw e;
+                    }
                 }
                 break;
 
             case UPDATE:
                 try {
                     accessor.update(tuple);
-                } catch (TreeIndexNonExistentKeyException e) {
-                    // Ignore non-existant keys, since we get random tuples.
-                } catch (BTreeNotUpdateableException e) {
-                    // Ignore not updateable exception due to numKeys == numFields.
+                } catch (HyracksDataException e) {
+                    // Ignore non-existent keys (random tuples) and not-updateable errors
+                    // due to numKeys == numFields; rethrow anything else.
+                    if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY
+                            && e.getErrorCode() != ErrorCode.INDEX_NOT_UPDATABLE) {
+                        throw e;
+                    }
                 }
                 break;
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java
index 62c0c43..f3e8ad8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java
@@ -22,7 +22,6 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
 import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
 import org.apache.hyracks.storage.am.common.datagen.TupleBatch;
@@ -32,7 +31,7 @@
     protected final float fillFactor;
 
     public BTreeBulkLoadRunner(int numBatches, int pageSize, int numPages, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] cmpFactories, float fillFactor) throws BTreeException, HyracksDataException {
+            IBinaryComparatorFactory[] cmpFactories, float fillFactor) throws HyracksDataException {
         super(numBatches, pageSize, numPages, typeTraits, cmpFactories);
         this.fillFactor = fillFactor;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java
index 5b84652..f6809b2 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java
@@ -23,7 +23,6 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
 import org.apache.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
 import org.apache.hyracks.storage.am.btree.util.BTreeUtils;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrameFactory;
@@ -38,20 +37,20 @@
     protected static final int HYRACKS_FRAME_SIZE = 128;
 
     public BTreeRunner(int numTuples, int pageSize, int numPages, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] cmpFactories) throws BTreeException, HyracksDataException {
+            IBinaryComparatorFactory[] cmpFactories) throws HyracksDataException {
         super(numTuples, pageSize, numPages, typeTraits, cmpFactories);
     }
 
     @Override
     protected void init(int pageSize, int numPages, ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories)
-            throws HyracksDataException, BTreeException {
+            throws HyracksDataException {
         IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
         TestStorageManagerComponentHolder.init(pageSize, numPages, MAX_OPEN_FILES);
         bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
         IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
         ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
         LinkedMetaDataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
-        btree = BTreeUtils.createBTree(bufferCache, fmp, typeTraits, cmpFactories, BTreeLeafFrameType.REGULAR_NSM,
-                file, freePageManager);
+        btree = BTreeUtils.createBTree(bufferCache, fmp, typeTraits, cmpFactories, BTreeLeafFrameType.REGULAR_NSM, file,
+                freePageManager);
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java
index b4c9418..9ae5aae 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java
@@ -27,14 +27,12 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
 import org.apache.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import org.apache.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import org.apache.hyracks.storage.am.btree.impls.BTree;
 import org.apache.hyracks.storage.am.common.api.IPageManager;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
 import org.apache.hyracks.storage.am.common.datagen.TupleBatch;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
@@ -59,7 +57,7 @@
     protected BTree btree;
 
     public InMemoryBTreeRunner(int numBatches, int pageSize, int numPages, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] cmpFactories) throws BTreeException, HyracksDataException {
+            IBinaryComparatorFactory[] cmpFactories) throws HyracksDataException {
         this.numBatches = numBatches;
         TestStorageManagerComponentHolder.init(pageSize, numPages, numPages);
         IIOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
@@ -69,7 +67,7 @@
     }
 
     protected void init(int pageSize, int numPages, ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories)
-            throws HyracksDataException, BTreeException {
+            throws HyracksDataException {
         bufferCache = new VirtualBufferCache(new HeapBufferAllocator(), pageSize, numPages);
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
         ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
@@ -136,7 +134,9 @@
                     for (int j = 0; j < batch.size(); j++) {
                         try {
                             indexAccessor.insert(batch.get(j));
-                        } catch (TreeIndexException e) {
+                        } catch (Exception e) {
+                            e.printStackTrace(); // previously swallowed as TreeIndexException; now logged and propagated
+                            throw e;
+                        }
                     }
                 }
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
index d4e7886b..50592a1 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
@@ -31,9 +31,7 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.control.nc.io.IOManager;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
 import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
 import org.apache.hyracks.storage.am.common.datagen.TupleBatch;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
@@ -84,7 +82,7 @@
 
     public LSMTreeRunner(int numBatches, int inMemPageSize, int inMemNumPages, int onDiskPageSize, int onDiskNumPages,
             ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories, int[] bloomFilterKeyFields,
-            double bloomFilterFalsePositiveRate) throws BTreeException, HyracksDataException {
+            double bloomFilterFalsePositiveRate) throws HyracksDataException {
         this.numBatches = numBatches;
 
         this.onDiskPageSize = onDiskPageSize;
@@ -101,8 +99,8 @@
 
         List<IVirtualBufferCache> virtualBufferCaches = new ArrayList<>();
         for (int i = 0; i < 2; i++) {
-            IVirtualBufferCache virtualBufferCache = new VirtualBufferCache(new HeapBufferAllocator(), inMemPageSize,
-                    inMemNumPages / 2);
+            IVirtualBufferCache virtualBufferCache =
+                    new VirtualBufferCache(new HeapBufferAllocator(), inMemPageSize, inMemNumPages / 2);
             virtualBufferCaches.add(virtualBufferCache);
         }
 
@@ -110,10 +108,9 @@
         AsynchronousScheduler.INSTANCE.init(threadFactory);
 
         lsmtree = LSMBTreeUtil.createLSMTree(ioManager, virtualBufferCaches, file, bufferCache, fmp, typeTraits,
-                cmpFactories,
-                bloomFilterKeyFields, bloomFilterFalsePositiveRate, new NoMergePolicy(), new ThreadCountingTracker(),
-                ioScheduler, NoOpIOOperationCallback.INSTANCE, true, null, null, null, null, true,
-                TestStorageManagerComponentHolder.getMetadataPageManagerFactory());
+                cmpFactories, bloomFilterKeyFields, bloomFilterFalsePositiveRate, new NoMergePolicy(),
+                new ThreadCountingTracker(), ioScheduler, NoOpIOOperationCallback.INSTANCE, true, null, null, null,
+                null, true, TestStorageManagerComponentHolder.getMetadataPageManagerFactory());
     }
 
     @Override
@@ -184,7 +181,8 @@
                     for (int j = 0; j < batch.size(); j++) {
                         try {
                             lsmTreeAccessor.insert(batch.get(j));
-                        } catch (TreeIndexException e) {
+                        } catch (Exception e) {
+                            throw e; // previously swallowed silently as TreeIndexException; failures now surface
+                        }
                     }
                     dataGen.releaseBatch(batch);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
index c16577b..e03f765 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
@@ -27,7 +27,6 @@
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
 import org.apache.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
 import org.apache.hyracks.storage.common.file.IFileMapProvider;
@@ -42,7 +41,7 @@
     @Override
     protected void cleanupAndGetValidFilesInternal(FilenameFilter filter,
             TreeIndexFactory<? extends ITreeIndex> treeFactory, ArrayList<ComparableFileName> allFiles)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         File dir = new File(baseDir);
         String[] files = dir.list(filter);
         for (String fileName : files) {
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
index 71b2c64..9b1ea52 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
@@ -37,7 +37,6 @@
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.control.nc.io.IOManager;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
 import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
 import org.apache.hyracks.storage.common.file.IFileMapProvider;
@@ -77,22 +76,22 @@
     }
 
     public void sortOrderTest(boolean testFlushFileName) throws InterruptedException, HyracksDataException {
-        ILSMIndexFileManager fileManager = new DummyLSMIndexFileManager(ioManager, fileMapProvider, file,
-                new DummyTreeFactory());
+        ILSMIndexFileManager fileManager =
+                new DummyLSMIndexFileManager(ioManager, fileMapProvider, file, new DummyTreeFactory());
         LinkedList<String> fileNames = new LinkedList<>();
 
         int numFileNames = 100;
         long sleepTime = 5;
         for (int i = 0; i < numFileNames; i++) {
-            String flushFileName = fileManager.getRelFlushFileReference().getInsertIndexFileReference()
-                    .getFile().getName();
+            String flushFileName =
+                    fileManager.getRelFlushFileReference().getInsertIndexFileReference().getFile().getName();
             if (testFlushFileName) {
                 fileNames.addFirst(flushFileName);
             }
             Thread.sleep(sleepTime);
             if (!testFlushFileName) {
-                String secondFlushFileName = fileManager.getRelFlushFileReference()
-                        .getInsertIndexFileReference().getFile().getName();
+                String secondFlushFileName =
+                        fileManager.getRelFlushFileReference().getInsertIndexFileReference().getFile().getName();
                 String mergeFileName = getMergeFileName(fileManager, flushFileName, secondFlushFileName);
                 fileNames.addFirst(mergeFileName);
                 Thread.sleep(sleepTime);
@@ -117,14 +116,14 @@
         sortOrderTest(false);
     }
 
-    public void cleanInvalidFilesTest(IOManager ioManager) throws InterruptedException, IOException, IndexException {
+    public void cleanInvalidFilesTest(IOManager ioManager) throws InterruptedException, IOException {
         String dirPath = ioManager.getIODevices().get(DEFAULT_IO_DEVICE_ID).getMount() + sep + "lsm_tree"
                 + simpleDateFormat.format(new Date()) + sep;
         File f = new File(dirPath);
         f.mkdirs();
         FileReference file = ioManager.resolveAbsolutePath(f.getAbsolutePath());
-        ILSMIndexFileManager fileManager = new DummyLSMIndexFileManager(ioManager, fileMapProvider, file,
-                new DummyTreeFactory());
+        ILSMIndexFileManager fileManager =
+                new DummyLSMIndexFileManager(ioManager, fileMapProvider, file, new DummyTreeFactory());
         fileManager.createDirs();
 
         List<FileReference> flushFiles = new ArrayList<>();
@@ -192,8 +191,8 @@
         // Check actual files against expected files.
         assertEquals(expectedValidFiles.size(), lsmComonentFileReference.size());
         for (int i = 0; i < expectedValidFiles.size(); i++) {
-            assertEquals(expectedValidFiles.get(i), lsmComonentFileReference.get(i).getInsertIndexFileReference()
-                    .getFile().getName());
+            assertEquals(expectedValidFiles.get(i),
+                    lsmComonentFileReference.get(i).getInsertIndexFileReference().getFile().getName());
         }
 
         // Make sure invalid files were removed from the IODevices.
@@ -220,7 +219,7 @@
     }
 
     @Test
-    public void singleIODeviceTest() throws InterruptedException, IOException, IndexException {
+    public void singleIODeviceTest() throws InterruptedException, IOException {
         IOManager singleDeviceIOManager = createIOManager(1);
         cleanInvalidFilesTest(singleDeviceIOManager);
         cleanDirs(singleDeviceIOManager);
@@ -253,8 +252,8 @@
 
     private FileReference simulateMerge(ILSMIndexFileManager fileManager, FileReference a, FileReference b)
             throws HyracksDataException {
-        LSMComponentFileReferences relMergeFileRefs = fileManager.getRelMergeFileReference(a.getFile().getName(), b
-                .getFile().getName());
+        LSMComponentFileReferences relMergeFileRefs =
+                fileManager.getRelMergeFileReference(a.getFile().getName(), b.getFile().getName());
         return relMergeFileRefs.getInsertIndexFileReference();
     }
 
@@ -262,7 +261,7 @@
             throws HyracksDataException {
         File f1 = new File(firstFile);
         File f2 = new File(lastFile);
-        return fileNameManager.getRelMergeFileReference(f1.getName(), f2.getName())
-                .getInsertIndexFileReference().getFile().getName();
+        return fileNameManager.getRelMergeFileReference(f1.getName(), f2.getName()).getInsertIndexFileReference()
+                .getFile().getName();
     }
 }
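
The simulateMerge()/getMergeFileName() helpers above derive a merged component name from the first and last flush files. Assuming the usual LSM convention of interval names of the form start_end (an assumption; the actual format lives in AbstractLSMIndexFileManager, which this patch does not touch), the derivation amounts to:

class MergeNameSketch {
    // Illustrative only: merging components [t1,t1] and [t2,t2] yields a
    // component covering [t1,t2].
    static String mergedComponentName(String firstFlushFile, String lastFlushFile) {
        String start = firstFlushFile.split("_")[0];
        String end = lastFlushFile.split("_")[1];
        return start + "_" + end;
    }
}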
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMergeTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMergeTest.java
index f336d4a..8b419b3 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMergeTest.java
@@ -22,7 +22,6 @@
 import java.io.IOException;
 
 import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
@@ -43,13 +42,12 @@
     }
 
     @Override
-    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
-            IndexException {
+    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException {
         IIndex invIndex = testCtx.getIndex();
         invIndex.create();
         invIndex.activate();
-        ILSMIndexAccessor invIndexAccessor = (ILSMIndexAccessor) invIndex.createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+        ILSMIndexAccessor invIndexAccessor = (ILSMIndexAccessor) invIndex.createAccessor(NoOpOperationCallback.INSTANCE,
+                NoOpOperationCallback.INSTANCE);
 
         for (int i = 0; i < maxTreesToMerge; i++) {
             for (int j = 0; j < i; j++) {
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMergeTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMergeTest.java
index 8aed83a..550d312 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMergeTest.java
@@ -21,8 +21,8 @@
 
 import java.io.IOException;
 
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
@@ -43,13 +43,13 @@
     }
 
     @Override
-    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
-            IndexException {
+    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen)
+            throws IOException, HyracksDataException {
         IIndex invIndex = testCtx.getIndex();
         invIndex.create();
         invIndex.activate();
-        ILSMIndexAccessor invIndexAccessor = (ILSMIndexAccessor) invIndex.createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+        ILSMIndexAccessor invIndexAccessor = (ILSMIndexAccessor) invIndex.createAccessor(NoOpOperationCallback.INSTANCE,
+                NoOpOperationCallback.INSTANCE);
 
         for (int i = 0; i < maxTreesToMerge; i++) {
             for (int j = 0; j < i; j++) {
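
Both merge tests share the nested-loop driver visible above: on iteration i they accumulate i disk components and then merge them. A schematic of that loop, with the index operations reduced to hypothetical stubs (insertBatchAndFlush, mergeAll, validate are placeholders, not the real test methods):

    // Schematic merge-test driver: build i components, then merge them all.
    class MergeDriverSketch {
        private final int maxTreesToMerge = 5; // hypothetical value

        void run() throws Exception {
            for (int i = 0; i < maxTreesToMerge; i++) {
                for (int j = 0; j < i; j++) {
                    insertBatchAndFlush(); // each flushed batch becomes one component
                }
                mergeAll();   // collapse the i components into a single one
                validate();   // then check index integrity and query results
            }
        }

        void insertBatchAndFlush() {}
        void mergeAll() {}
        void validate() {}
    }
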
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexDeleteTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexDeleteTest.java
index e59f85c..f4290bb 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexDeleteTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexDeleteTest.java
@@ -21,15 +21,13 @@
 
 import java.io.IOException;
 
-import org.junit.Test;
-
 import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
+import org.junit.Test;
 
 public abstract class AbstractInvertedIndexDeleteTest extends AbstractInvertedIndexTest {
 
@@ -42,11 +40,11 @@
         this.bulkLoad = bulkLoad;
     }
 
-    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
-            IndexException {
+    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException {
         IIndex invIndex = testCtx.getIndex();
 
-        if ((invIndexType != InvertedIndexType.LSM) && (invIndexType != InvertedIndexType.PARTITIONED_LSM) || !bulkLoad) {
+        if ((invIndexType != InvertedIndexType.LSM) && (invIndexType != InvertedIndexType.PARTITIONED_LSM)
+                || !bulkLoad) {
             invIndex.create();
             invIndex.activate();
         }
@@ -66,8 +64,8 @@
             }
 
             // Delete all documents in a couple of rounds.
-            int numTuplesPerDeleteRound = (int) Math.ceil((float) testCtx.getDocumentCorpus().size()
-                    / (float) numDeleteRounds);
+            int numTuplesPerDeleteRound =
+                    (int) Math.ceil((float) testCtx.getDocumentCorpus().size() / (float) numDeleteRounds);
             for (int j = 0; j < numDeleteRounds; j++) {
                 LSMInvertedIndexTestUtils.deleteFromInvIndex(testCtx, harness.getRandom(), numTuplesPerDeleteRound);
                 validateAndCheckIndex(testCtx);
@@ -80,33 +78,33 @@
     }
 
     @Test
-    public void wordTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness,
-                invIndexType);
+    public void wordTokensInvIndexTest() throws IOException {
+        LSMInvertedIndexTestContext testCtx =
+                LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness, invIndexType);
         TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
         runTest(testCtx, tupleGen);
     }
 
     @Test
-    public void hashedWordTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness,
-                invIndexType);
+    public void hashedWordTokensInvIndexTest() throws IOException {
+        LSMInvertedIndexTestContext testCtx =
+                LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness, invIndexType);
         TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
         runTest(testCtx, tupleGen);
     }
 
     @Test
-    public void ngramTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness,
-                invIndexType);
+    public void ngramTokensInvIndexTest() throws IOException {
+        LSMInvertedIndexTestContext testCtx =
+                LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness, invIndexType);
         TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
         runTest(testCtx, tupleGen);
     }
 
     @Test
-    public void hashedNGramTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness,
-                invIndexType);
+    public void hashedNGramTokensInvIndexTest() throws IOException {
+        LSMInvertedIndexTestContext testCtx =
+                LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness, invIndexType);
         TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
         runTest(testCtx, tupleGen);
     }
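
The delete test splits the corpus into numDeleteRounds batches, rounding up so no documents are left over. For a corpus of 10 documents and 3 rounds, ceil(10/3) = 4 deletions per round, and three rounds of at most 4 cover all 10. A direct check of that arithmetic:

    public class DeleteRoundsSketch {
        public static void main(String[] args) {
            int corpusSize = 10;       // stands in for testCtx.getDocumentCorpus().size()
            int numDeleteRounds = 3;
            int numTuplesPerDeleteRound =
                    (int) Math.ceil((float) corpusSize / (float) numDeleteRounds);
            System.out.println(numTuplesPerDeleteRound); // prints 4
        }
    }
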
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexLoadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexLoadTest.java
index 048f3e4..6f006f8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexLoadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexLoadTest.java
@@ -22,7 +22,6 @@
 import java.io.IOException;
 
 import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
@@ -38,13 +37,12 @@
         this.bulkLoad = bulkLoad;
     }
 
-    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
-            IndexException {
+    protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException {
         IIndex invIndex = testCtx.getIndex();
         invIndex.create();
         invIndex.activate();
         if (bulkLoad) {
-                LSMInvertedIndexTestUtils.bulkLoadInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT, true);
+            LSMInvertedIndexTestUtils.bulkLoadInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT, true);
         } else {
             LSMInvertedIndexTestUtils.insertIntoInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
         }
@@ -56,33 +54,33 @@
     }
 
     @Test
-    public void wordTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness,
-                invIndexType);
+    public void wordTokensInvIndexTest() throws IOException {
+        LSMInvertedIndexTestContext testCtx =
+                LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness, invIndexType);
         TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
         runTest(testCtx, tupleGen);
     }
 
     @Test
-    public void hashedWordTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness,
-                invIndexType);
+    public void hashedWordTokensInvIndexTest() throws IOException {
+        LSMInvertedIndexTestContext testCtx =
+                LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness, invIndexType);
         TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
         runTest(testCtx, tupleGen);
     }
 
     @Test
-    public void ngramTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness,
-                invIndexType);
+    public void ngramTokensInvIndexTest() throws IOException {
+        LSMInvertedIndexTestContext testCtx =
+                LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness, invIndexType);
         TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
         runTest(testCtx, tupleGen);
     }
 
     @Test
-    public void hashedNGramTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness,
-                invIndexType);
+    public void hashedNGramTokensInvIndexTest() throws IOException {
+        LSMInvertedIndexTestContext testCtx =
+                LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness, invIndexType);
         TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
         runTest(testCtx, tupleGen);
     }
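
The load test's only branch is the bulkLoad flag: either the whole corpus goes through an index bulk loader or it is inserted tuple by tuple. A reduced sketch of that split; the two helpers here are placeholders mirroring the LSMInvertedIndexTestUtils calls above:

    class LoadPathSketch {
        static final int NUM_DOCS_TO_INSERT = 100; // hypothetical value

        void load(boolean bulkLoad) {
            if (bulkLoad) {
                bulkLoadInvIndex(NUM_DOCS_TO_INSERT);   // bulk-loader path (appendOnly flag in the real helper)
            } else {
                insertIntoInvIndex(NUM_DOCS_TO_INSERT); // regular per-tuple inserts
            }
        }

        void bulkLoadInvIndex(int numDocs) {}
        void insertIntoInvIndex(int numDocs) {}
    }
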
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
index 3a188fe..3a63e55 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
@@ -26,7 +26,6 @@
 import java.util.logging.Logger;
 
 import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
@@ -52,9 +51,10 @@
     }
 
     protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen,
-            List<IInvertedIndexSearchModifier> searchModifiers) throws IOException, IndexException {
+            List<IInvertedIndexSearchModifier> searchModifiers) throws IOException {
         IIndex invIndex = testCtx.getIndex();
-        if ((invIndexType != InvertedIndexType.LSM) && (invIndexType != InvertedIndexType.PARTITIONED_LSM) || !bulkLoad) {
+        if ((invIndexType != InvertedIndexType.LSM) && (invIndexType != InvertedIndexType.PARTITIONED_LSM)
+                || !bulkLoad) {
             invIndex.create();
             invIndex.activate();
         }
@@ -81,9 +81,9 @@
         invIndex.destroy();
     }
 
-    private void testWordInvIndexIndex(LSMInvertedIndexTestContext testCtx) throws IOException, IndexException {
+    private void testWordInvIndexIndex(LSMInvertedIndexTestContext testCtx) throws IOException {
         TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
-        List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<IInvertedIndexSearchModifier>();
+        List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<>();
         searchModifiers.add(new ConjunctiveSearchModifier());
         searchModifiers.add(new JaccardSearchModifier(1.0f));
         searchModifiers.add(new JaccardSearchModifier(0.8f));
@@ -91,9 +91,9 @@
         runTest(testCtx, tupleGen, searchModifiers);
     }
 
-    private void testNGramInvIndexIndex(LSMInvertedIndexTestContext testCtx) throws IOException, IndexException {
+    private void testNGramInvIndexIndex(LSMInvertedIndexTestContext testCtx) throws IOException {
         TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
-        List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<IInvertedIndexSearchModifier>();
+        List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<>();
         searchModifiers.add(new ConjunctiveSearchModifier());
         searchModifiers.add(new JaccardSearchModifier(1.0f));
         searchModifiers.add(new JaccardSearchModifier(0.8f));
@@ -106,30 +106,30 @@
     }
 
     @Test
-    public void wordTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness,
-                invIndexType);
+    public void wordTokensInvIndexTest() throws IOException {
+        LSMInvertedIndexTestContext testCtx =
+                LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness, invIndexType);
         testWordInvIndexIndex(testCtx);
     }
 
     @Test
-    public void hashedWordTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness,
-                invIndexType);
+    public void hashedWordTokensInvIndexTest() throws IOException {
+        LSMInvertedIndexTestContext testCtx =
+                LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness, invIndexType);
         testWordInvIndexIndex(testCtx);
     }
 
     @Test
-    public void ngramTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness,
-                invIndexType);
+    public void ngramTokensInvIndexTest() throws IOException {
+        LSMInvertedIndexTestContext testCtx =
+                LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness, invIndexType);
         testNGramInvIndexIndex(testCtx);
     }
 
     @Test
-    public void hashedNGramTokensInvIndexTest() throws IOException, IndexException {
-        LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness,
-                invIndexType);
+    public void hashedNGramTokensInvIndexTest() throws IOException {
+        LSMInvertedIndexTestContext testCtx =
+                LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness, invIndexType);
         testNGramInvIndexIndex(testCtx);
     }
 
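The search tests sweep a list of search modifiers per index type: a conjunctive (all-tokens-must-match) modifier plus Jaccard modifiers at decreasing similarity thresholds. A condensed, stand-alone version of how that list is assembled; the modifier classes are stubbed so the sketch compiles on its own, but the names and thresholds match the hunks above:

    import java.util.ArrayList;
    import java.util.List;

    // Stubs so the sketch stands alone; names and thresholds follow the test.
    interface IInvertedIndexSearchModifier {}

    class ConjunctiveSearchModifier implements IInvertedIndexSearchModifier {}

    class JaccardSearchModifier implements IInvertedIndexSearchModifier {
        final float threshold;
        JaccardSearchModifier(float threshold) { this.threshold = threshold; }
    }

    class SearchModifierSketch {
        static List<IInvertedIndexSearchModifier> wordModifiers() {
            List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<>();
            searchModifiers.add(new ConjunctiveSearchModifier()); // every query token must match
            searchModifiers.add(new JaccardSearchModifier(1.0f)); // exact token-set similarity
            searchModifiers.add(new JaccardSearchModifier(0.8f)); // progressively fuzzier matches
            return searchModifiers;
        }
    }
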
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
index df4062f..2115eac 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
@@ -25,7 +25,6 @@
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
@@ -73,7 +72,7 @@
      * This test is only for verifying the integrity and correctness of the index,
      * it does not ensure the correctness of index searches.
      */
-    protected void validateAndCheckIndex(LSMInvertedIndexTestContext testCtx) throws HyracksDataException, IndexException {
+    protected void validateAndCheckIndex(LSMInvertedIndexTestContext testCtx) throws HyracksDataException {
         IIndex invIndex = testCtx.getIndex();
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Validating index: " + invIndex);
@@ -90,8 +89,8 @@
     /**
      * Runs a workload of queries using different search modifiers, and verifies the correctness of the results.
      */
-    protected void runTinySearchWorkload(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
-            IndexException {
+    protected void runTinySearchWorkload(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen)
+            throws IOException {
         for (IInvertedIndexSearchModifier searchModifier : TEST_SEARCH_MODIFIERS) {
             if (LOGGER.isLoggable(Level.INFO)) {
                 LOGGER.info("Running test workload with: " + searchModifier.toString());
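
These tests consistently use the guarded-logging idiom from java.util.logging: isLoggable() is checked before building the message, so the string concatenation is skipped when INFO is disabled. In isolation:

    import java.util.logging.Level;
    import java.util.logging.Logger;

    // Guarded logging: the concatenation only runs when INFO is actually on.
    class GuardedLoggingSketch {
        private static final Logger LOGGER = Logger.getLogger(GuardedLoggingSketch.class.getName());

        void validate(Object invIndex) {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Validating index: " + invIndex);
            }
        }
    }
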
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
index 88a8abd..e0427e8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
@@ -27,8 +27,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
 import org.apache.hyracks.storage.am.common.TestWorkloadConf;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;
 import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
@@ -61,8 +59,7 @@
     }
 
     protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numThreads,
-            TestWorkloadConf conf, String dataMsg) throws InterruptedException, TreeIndexException,
-            HyracksDataException {
+            TestWorkloadConf conf, String dataMsg) throws InterruptedException, HyracksDataException {
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("LSMInvertedIndex MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
                     + "; Workload: " + conf.toString() + ".");
@@ -88,36 +85,36 @@
 
         // Insert only workload.
         TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
-        workloadConfs.add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertOnlyOps.length)));
+        workloadConfs
+                .add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper.getUniformProbDist(insertOnlyOps.length)));
 
         // Insert and merge workload.
         TestOperation[] insertMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(insertMergeOps, ProbabilityHelper
-                .getUniformProbDist(insertMergeOps.length)));
+        workloadConfs
+                .add(new TestWorkloadConf(insertMergeOps, ProbabilityHelper.getUniformProbDist(insertMergeOps.length)));
 
         // Inserts mixed with point searches and scans.
-        TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH,
-                TestOperation.SCAN };
-        workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, ProbabilityHelper
-                .getUniformProbDist(insertSearchOnlyOps.length)));
+        TestOperation[] insertSearchOnlyOps =
+                new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH, TestOperation.SCAN };
+        workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps,
+                ProbabilityHelper.getUniformProbDist(insertSearchOnlyOps.length)));
 
         // Inserts, and deletes.
         TestOperation[] insertDeleteUpdateOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteUpdateOps.length)));
+        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateOps,
+                ProbabilityHelper.getUniformProbDist(insertDeleteUpdateOps.length)));
 
         // Inserts, deletes and merges.
-        TestOperation[] insertDeleteUpdateMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.MERGE };
-        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateMergeOps, ProbabilityHelper
-                .getUniformProbDist(insertDeleteUpdateMergeOps.length)));
+        TestOperation[] insertDeleteUpdateMergeOps =
+                new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.MERGE };
+        workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateMergeOps,
+                ProbabilityHelper.getUniformProbDist(insertDeleteUpdateMergeOps.length)));
 
         // All operations except merge.
         TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
                 TestOperation.POINT_SEARCH, TestOperation.SCAN };
-        workloadConfs.add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper
-                .getUniformProbDist(allNoMergeOps.length)));
+        workloadConfs
+                .add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper.getUniformProbDist(allNoMergeOps.length)));
 
         // All operations.
         TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
@@ -128,14 +125,14 @@
     }
 
     @Test
-    public void wordTokensInvIndexTest() throws IOException, IndexException, InterruptedException {
+    public void wordTokensInvIndexTest() throws IOException, InterruptedException {
         String dataMsg = "Documents";
         int[] numThreads = new int[] { REGULAR_NUM_THREADS, EXCESSIVE_NUM_THREADS };
         for (int i = 0; i < numThreads.length; i++) {
             for (TestWorkloadConf conf : workloadConfs) {
                 setUp();
-                LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness,
-                        getIndexType());
+                LSMInvertedIndexTestContext testCtx =
+                        LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness, getIndexType());
                 TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
                 runTest(testCtx, tupleGen, numThreads[i], conf, dataMsg);
                 tearDown();
@@ -144,14 +141,14 @@
     }
 
     @Test
-    public void hashedNGramTokensInvIndexTest() throws IOException, IndexException, InterruptedException {
+    public void hashedNGramTokensInvIndexTest() throws IOException, InterruptedException {
         String dataMsg = "Person Names";
         int[] numThreads = new int[] { REGULAR_NUM_THREADS, EXCESSIVE_NUM_THREADS };
         for (int i = 0; i < numThreads.length; i++) {
             for (TestWorkloadConf conf : workloadConfs) {
                 setUp();
-                LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(
-                        harness, getIndexType());
+                LSMInvertedIndexTestContext testCtx =
+                        LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness, getIndexType());
                 TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
                 runTest(testCtx, tupleGen, numThreads[i], conf, dataMsg);
                 tearDown();
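
Each workload above pairs an operation array with ProbabilityHelper.getUniformProbDist(ops.length), i.e. every listed operation is drawn with equal probability. A stand-alone sketch of that pairing; the double[] return type and the 1/n fill are assumptions about ProbabilityHelper, not taken from this diff:

    import java.util.Arrays;

    // Hypothetical stand-ins: the enum mirrors TestOperation above, and the
    // double[] shape of getUniformProbDist is an assumption, not from this diff.
    enum TestOperationSketch { INSERT, DELETE, POINT_SEARCH, SCAN, MERGE }

    public class WorkloadSketch {
        // Uniform distribution: each of n operations is drawn with probability 1/n.
        static double[] getUniformProbDist(int n) {
            double[] probs = new double[n];
            Arrays.fill(probs, 1.0 / n);
            return probs;
        }

        public static void main(String[] args) {
            TestOperationSketch[] insertMergeOps =
                    { TestOperationSketch.INSERT, TestOperationSketch.MERGE };
            System.out.println(Arrays.toString(getUniformProbDist(insertMergeOps.length)));
            // prints [0.5, 0.5]
        }
    }
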
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexTestWorker.java
index 9e24600..d0a90b7 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexTestWorker.java
@@ -23,6 +23,7 @@
 import java.util.List;
 import java.util.Random;
 
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.dataflow.common.utils.TupleUtils;
@@ -32,11 +33,9 @@
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
 import org.apache.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
 import org.apache.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndex;
 import org.apache.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndexAccessor;
 import org.apache.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveSearchModifier;
@@ -47,7 +46,7 @@
 public class LSMInvertedIndexTestWorker extends AbstractIndexTestWorker {
 
     protected final LSMInvertedIndex invIndex;
-    protected final List<ITupleReference> documentCorpus = new ArrayList<ITupleReference>();
+    protected final List<ITupleReference> documentCorpus = new ArrayList<>();
     protected final Random rnd = new Random(50);
 
     protected final IInvertedIndexSearchModifier[] TEST_SEARCH_MODIFIERS = new IInvertedIndexSearchModifier[] {
@@ -60,7 +59,7 @@
     }
 
     @Override
-    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
         LSMInvertedIndexAccessor accessor = (LSMInvertedIndexAccessor) indexAccessor;
         IIndexCursor searchCursor = accessor.createSearchCursor(false);
         IIndexCursor rangeSearchCursor = accessor.createRangeSearchCursor();
@@ -99,8 +98,11 @@
                 try {
                     accessor.search(searchCursor, searchPred);
                     consumeCursorTuples(searchCursor);
-                } catch (OccurrenceThresholdPanicException e) {
+                } catch (HyracksDataException e) {
                     // Ignore.
+                    if (e.getErrorCode() != ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION) {
+                        throw e;
+                    }
                 }
                 break;
             }
@@ -122,8 +124,7 @@
         }
     }
 
-    private void insert(LSMInvertedIndexAccessor accessor, ITupleReference tuple) throws HyracksDataException,
-            IndexException {
+    private void insert(LSMInvertedIndexAccessor accessor, ITupleReference tuple) throws HyracksDataException {
         // Ignore ongoing merges. Do an insert instead.
         accessor.insert(tuple);
         // Add tuple to document corpus so we can delete it.
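
The worker's search path illustrates this commit's new error model: instead of catching a dedicated OccurrenceThresholdPanicException type, it catches HyracksDataException, inspects the error code, and rethrows anything that is not the expected panic. A reduced sketch of that filter; both the exception class and the code constant are stand-ins shaped like the Hyracks API used above, and the numeric value is invented:

    // Stand-ins shaped like the Hyracks API in the hunk above: an exception
    // carrying an integer error code, plus the one code we want to swallow.
    class HyracksDataExceptionSketch extends Exception {
        private final int errorCode;
        HyracksDataExceptionSketch(int errorCode) { this.errorCode = errorCode; }
        int getErrorCode() { return errorCode; }
    }

    class PanicFilterSketch {
        static final int OCCURRENCE_THRESHOLD_PANIC_EXCEPTION = 42; // invented value

        void doSearch() throws HyracksDataExceptionSketch {
            // placeholder for accessor.search(...) plus cursor consumption
        }

        void searchIgnoringPanic() throws HyracksDataExceptionSketch {
            try {
                doSearch();
            } catch (HyracksDataExceptionSketch e) {
                // Swallow only the expected panic; propagate every other failure.
                if (e.getErrorCode() != OCCURRENCE_THRESHOLD_PANIC_EXCEPTION) {
                    throw e;
                }
            }
        }
    }
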
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
index cae6a7d..6e17fff 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
@@ -30,6 +30,7 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.control.nc.io.IOManager;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -38,11 +39,9 @@
 import org.apache.hyracks.storage.am.btree.OrderedIndexTestContext;
 import org.apache.hyracks.storage.am.common.CheckTuple;
 import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.freepage.VirtualFreePageManager;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
 import org.apache.hyracks.storage.am.lsm.invertedindex.common.LSMInvertedIndexTestHarness;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.InvertedIndexException;
 import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
 
 @SuppressWarnings("rawtypes")
@@ -103,11 +102,11 @@
             ISerializerDeserializer[] fieldSerdes, int tokenFieldCount, IBinaryTokenizerFactory tokenizerFactory,
             InvertedIndexType invIndexType, int[] invertedIndexFields, ITypeTraits[] filterTypeTraits,
             IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields, int[] filterFieldsForNonBulkLoadOps,
-            int[] invertedIndexFieldsForNonBulkLoadOps) throws IndexException, HyracksDataException {
+            int[] invertedIndexFieldsForNonBulkLoadOps) throws HyracksDataException {
         ITypeTraits[] allTypeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
         IOManager ioManager = harness.getIOManager();
-        IBinaryComparatorFactory[] allCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes,
-                fieldSerdes.length);
+        IBinaryComparatorFactory[] allCmpFactories =
+                SerdeUtils.serdesToComparatorFactories(fieldSerdes, fieldSerdes.length);
         // Set token type traits and comparators.
         ITypeTraits[] tokenTypeTraits = new ITypeTraits[tokenFieldCount];
         IBinaryComparatorFactory[] tokenCmpFactories = new IBinaryComparatorFactory[tokenFieldCount];
@@ -129,17 +128,17 @@
         switch (invIndexType) {
             case INMEMORY: {
                 invIndex = InvertedIndexUtils.createInMemoryBTreeInvertedindex(harness.getVirtualBufferCaches().get(0),
-                        new VirtualFreePageManager(harness.getVirtualBufferCaches().get(0)),
-                        invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
+                        new VirtualFreePageManager(harness.getVirtualBufferCaches().get(0)), invListTypeTraits,
+                        invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
                         ioManager.resolveAbsolutePath(harness.getOnDiskDir()));
                 break;
             }
             case PARTITIONED_INMEMORY: {
-                invIndex = InvertedIndexUtils.createPartitionedInMemoryBTreeInvertedindex(harness
-                        .getVirtualBufferCaches().get(0), new VirtualFreePageManager(harness.getVirtualBufferCaches()
-                                .get(0)), invListTypeTraits,
-                        invListCmpFactories, tokenTypeTraits,
-                        tokenCmpFactories, tokenizerFactory, ioManager.resolveAbsolutePath(harness.getOnDiskDir()));
+                invIndex = InvertedIndexUtils.createPartitionedInMemoryBTreeInvertedindex(
+                        harness.getVirtualBufferCaches().get(0),
+                        new VirtualFreePageManager(harness.getVirtualBufferCaches().get(0)), invListTypeTraits,
+                        invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
+                        ioManager.resolveAbsolutePath(harness.getOnDiskDir()));
                 break;
             }
             case ONDISK: {
@@ -149,10 +148,10 @@
                 break;
             }
             case PARTITIONED_ONDISK: {
-                invIndex = InvertedIndexUtils.createPartitionedOnDiskInvertedIndex(ioManager, harness
-                        .getDiskBufferCache(),
-                        harness.getDiskFileMapProvider(), invListTypeTraits, invListCmpFactories, tokenTypeTraits,
-                        tokenCmpFactories, harness.getInvListsFileRef(), harness.getMetadataPageManagerFactory());
+                invIndex = InvertedIndexUtils.createPartitionedOnDiskInvertedIndex(ioManager,
+                        harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), invListTypeTraits,
+                        invListCmpFactories, tokenTypeTraits, tokenCmpFactories, harness.getInvListsFileRef(),
+                        harness.getMetadataPageManagerFactory());
                 break;
             }
             case LSM: {
@@ -162,24 +161,23 @@
                         harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
                         harness.getOperationTracker(), harness.getIOScheduler(), harness.getIOOperationCallback(),
                         invertedIndexFields, filterTypeTraits, filterCmpFactories, filterFields,
-                        filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, true, harness
-                                .getMetadataPageManagerFactory());
+                        filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, true,
+                        harness.getMetadataPageManagerFactory());
                 break;
             }
             case PARTITIONED_LSM: {
-                invIndex = InvertedIndexUtils.createPartitionedLSMInvertedIndex(ioManager, harness
-                        .getVirtualBufferCaches(),
-                        harness.getDiskFileMapProvider(), invListTypeTraits, invListCmpFactories, tokenTypeTraits,
-                        tokenCmpFactories, tokenizerFactory, harness.getDiskBufferCache(), harness.getOnDiskDir(),
-                        harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
-                        harness.getOperationTracker(), harness.getIOScheduler(), harness.getIOOperationCallback(),
-                        invertedIndexFields, filterTypeTraits, filterCmpFactories, filterFields,
-                        filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, true, harness
-                                .getMetadataPageManagerFactory());
+                invIndex = InvertedIndexUtils.createPartitionedLSMInvertedIndex(ioManager,
+                        harness.getVirtualBufferCaches(), harness.getDiskFileMapProvider(), invListTypeTraits,
+                        invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
+                        harness.getDiskBufferCache(), harness.getOnDiskDir(), harness.getBoomFilterFalsePositiveRate(),
+                        harness.getMergePolicy(), harness.getOperationTracker(), harness.getIOScheduler(),
+                        harness.getIOOperationCallback(), invertedIndexFields, filterTypeTraits, filterCmpFactories,
+                        filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, true,
+                        harness.getMetadataPageManagerFactory());
                 break;
             }
             default: {
-                throw new InvertedIndexException("Unknow inverted-index type '" + invIndexType + "'.");
+                throw HyracksDataException.create(ErrorCode.UNKNOWN_INVERTED_INDEX_TYPE, invIndexType);
             }
         }
         InvertedIndexTokenizingTupleIterator indexTupleIter = null;
@@ -194,17 +192,17 @@
             case PARTITIONED_INMEMORY:
             case PARTITIONED_ONDISK:
             case PARTITIONED_LSM: {
-                indexTupleIter = new PartitionedInvertedIndexTokenizingTupleIterator(
-                        invIndex.getTokenTypeTraits().length, invIndex.getInvListTypeTraits().length,
-                        tokenizerFactory.createTokenizer());
+                indexTupleIter =
+                        new PartitionedInvertedIndexTokenizingTupleIterator(invIndex.getTokenTypeTraits().length,
+                                invIndex.getInvListTypeTraits().length, tokenizerFactory.createTokenizer());
                 break;
             }
             default: {
-                throw new InvertedIndexException("Unknow inverted-index type '" + invIndexType + "'.");
+                throw HyracksDataException.create(ErrorCode.UNKNOWN_INVERTED_INDEX_TYPE, invIndexType);
             }
         }
-        LSMInvertedIndexTestContext testCtx = new LSMInvertedIndexTestContext(fieldSerdes, invIndex, tokenizerFactory,
-                invIndexType, indexTupleIter);
+        LSMInvertedIndexTestContext testCtx =
+                new LSMInvertedIndexTestContext(fieldSerdes, invIndex, tokenizerFactory, invIndexType, indexTupleIter);
         return testCtx;
     }
 
@@ -240,8 +238,8 @@
     public CheckTuple createCheckTuple(ITupleReference tuple) throws HyracksDataException {
         CheckTuple checkTuple = new CheckTuple(fieldSerdes.length, fieldSerdes.length);
         for (int i = 0; i < fieldSerdes.length; i++) {
-            ByteArrayInputStream bains = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i));
+            ByteArrayInputStream bains =
+                    new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
             DataInput in = new DataInputStream(bains);
             Comparable field = (Comparable) fieldSerdes[i].deserialize(in);
             checkTuple.appendField(field);
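
createCheckTuple decodes each tuple field by wrapping the field's byte range in a ByteArrayInputStream and handing a DataInput to that field's serde. The same pattern reduced to one plain-Java field read, with DataInputStream.readInt standing in for fieldSerdes[i].deserialize:

    import java.io.ByteArrayInputStream;
    import java.io.DataInput;
    import java.io.DataInputStream;
    import java.io.IOException;
    import java.nio.ByteBuffer;

    public class FieldDecodeSketch {
        public static void main(String[] args) throws IOException {
            // Pretend this buffer is tuple.getFieldData(i): a 4-byte int at offset 0.
            byte[] fieldData = ByteBuffer.allocate(4).putInt(7).array();
            int fieldStart = 0;                       // tuple.getFieldStart(i)
            int fieldLength = 4;                      // tuple.getFieldLength(i)
            ByteArrayInputStream bains = new ByteArrayInputStream(fieldData, fieldStart, fieldLength);
            DataInput in = new DataInputStream(bains);
            int field = in.readInt();                 // fieldSerdes[i].deserialize(in) in the test
            System.out.println(field);                // prints 7
        }
    }
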
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestUtils.java
index a5d77b0..157f86b 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestUtils.java
@@ -36,6 +36,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
 import org.apache.hyracks.data.std.util.GrowableArray;
@@ -50,7 +51,6 @@
 import org.apache.hyracks.storage.am.common.CheckTuple;
 import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.DocumentStringFieldValueGenerator;
 import org.apache.hyracks.storage.am.common.datagen.IFieldValueGenerator;
 import org.apache.hyracks.storage.am.common.datagen.PersonNameFieldValueGenerator;
@@ -64,7 +64,6 @@
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
 import org.apache.hyracks.storage.am.lsm.invertedindex.common.LSMInvertedIndexTestHarness;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
 import org.apache.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
 import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
 import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.HashedUTF8NGramTokenFactory;
@@ -87,8 +86,8 @@
         IFieldValueGenerator[] fieldGens = new IFieldValueGenerator[2];
         fieldGens[0] = new DocumentStringFieldValueGenerator(2, 10, 10000, rnd);
         fieldGens[1] = new SortedIntegerFieldValueGenerator(0);
-        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
-                new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE };
+        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+                IntegerSerializerDeserializer.INSTANCE };
         TupleGenerator tupleGen = new TupleGenerator(fieldGens, fieldSerdes, 0);
         return tupleGen;
     }
@@ -97,14 +96,14 @@
         IFieldValueGenerator[] fieldGens = new IFieldValueGenerator[2];
         fieldGens[0] = new PersonNameFieldValueGenerator(rnd, 0.5f);
         fieldGens[1] = new SortedIntegerFieldValueGenerator(0);
-        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
-                new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE };
+        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+                IntegerSerializerDeserializer.INSTANCE };
         TupleGenerator tupleGen = new TupleGenerator(fieldGens, fieldSerdes, 0);
         return tupleGen;
     }
 
     private static ISerializerDeserializer[] getNonHashedIndexFieldSerdes(InvertedIndexType invIndexType)
-            throws IndexException {
+            throws HyracksDataException {
         ISerializerDeserializer[] fieldSerdes = null;
         switch (invIndexType) {
             case INMEMORY:
@@ -123,14 +122,14 @@
                 break;
             }
             default: {
-                throw new IndexException("Unhandled inverted index type '" + invIndexType + "'.");
+                throw new HyracksDataException("Unhandled inverted index type '" + invIndexType + "'.");
             }
         }
         return fieldSerdes;
     }
 
     private static ISerializerDeserializer[] getHashedIndexFieldSerdes(InvertedIndexType invIndexType)
-            throws IndexException {
+            throws HyracksDataException {
         ISerializerDeserializer[] fieldSerdes = null;
         switch (invIndexType) {
             case INMEMORY:
@@ -149,58 +148,58 @@
                 break;
             }
             default: {
-                throw new IndexException("Unhandled inverted index type '" + invIndexType + "'.");
+                throw new HyracksDataException("Unhandled inverted index type '" + invIndexType + "'.");
             }
         }
         return fieldSerdes;
     }
 
     public static LSMInvertedIndexTestContext createWordInvIndexTestContext(LSMInvertedIndexTestHarness harness,
-            InvertedIndexType invIndexType) throws IOException, IndexException {
+            InvertedIndexType invIndexType) throws IOException, HyracksDataException {
         ISerializerDeserializer[] fieldSerdes = getNonHashedIndexFieldSerdes(invIndexType);
         ITokenFactory tokenFactory = new UTF8WordTokenFactory();
-        IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
-                tokenFactory);
+        IBinaryTokenizerFactory tokenizerFactory =
+                new DelimitedUTF8StringBinaryTokenizerFactory(true, false, tokenFactory);
         LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestContext.create(harness, fieldSerdes,
                 fieldSerdes.length - 1, tokenizerFactory, invIndexType, null, null, null, null, null, null);
         return testCtx;
     }
 
     public static LSMInvertedIndexTestContext createHashedWordInvIndexTestContext(LSMInvertedIndexTestHarness harness,
-            InvertedIndexType invIndexType) throws IOException, IndexException {
+            InvertedIndexType invIndexType) throws IOException, HyracksDataException {
         ISerializerDeserializer[] fieldSerdes = getHashedIndexFieldSerdes(invIndexType);
         ITokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
-        IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
-                tokenFactory);
+        IBinaryTokenizerFactory tokenizerFactory =
+                new DelimitedUTF8StringBinaryTokenizerFactory(true, false, tokenFactory);
         LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestContext.create(harness, fieldSerdes,
                 fieldSerdes.length - 1, tokenizerFactory, invIndexType, null, null, null, null, null, null);
         return testCtx;
     }
 
     public static LSMInvertedIndexTestContext createNGramInvIndexTestContext(LSMInvertedIndexTestHarness harness,
-            InvertedIndexType invIndexType) throws IOException, IndexException {
+            InvertedIndexType invIndexType) throws IOException, HyracksDataException {
         ISerializerDeserializer[] fieldSerdes = getNonHashedIndexFieldSerdes(invIndexType);
         ITokenFactory tokenFactory = new UTF8NGramTokenFactory();
-        IBinaryTokenizerFactory tokenizerFactory = new NGramUTF8StringBinaryTokenizerFactory(TEST_GRAM_LENGTH, true,
-                true, false, tokenFactory);
+        IBinaryTokenizerFactory tokenizerFactory =
+                new NGramUTF8StringBinaryTokenizerFactory(TEST_GRAM_LENGTH, true, true, false, tokenFactory);
         LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestContext.create(harness, fieldSerdes,
                 fieldSerdes.length - 1, tokenizerFactory, invIndexType, null, null, null, null, null, null);
         return testCtx;
     }
 
     public static LSMInvertedIndexTestContext createHashedNGramInvIndexTestContext(LSMInvertedIndexTestHarness harness,
-            InvertedIndexType invIndexType) throws IOException, IndexException {
+            InvertedIndexType invIndexType) throws IOException, HyracksDataException {
         ISerializerDeserializer[] fieldSerdes = getHashedIndexFieldSerdes(invIndexType);
         ITokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
-        IBinaryTokenizerFactory tokenizerFactory = new NGramUTF8StringBinaryTokenizerFactory(TEST_GRAM_LENGTH, true,
-                true, false, tokenFactory);
+        IBinaryTokenizerFactory tokenizerFactory =
+                new NGramUTF8StringBinaryTokenizerFactory(TEST_GRAM_LENGTH, true, true, false, tokenFactory);
         LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestContext.create(harness, fieldSerdes,
                 fieldSerdes.length - 1, tokenizerFactory, invIndexType, null, null, null, null, null, null);
         return testCtx;
     }
 
-    public static void bulkLoadInvIndex(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numDocs, boolean appendOnly)
-            throws IndexException, IOException {
+    public static void bulkLoadInvIndex(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numDocs,
+            boolean appendOnly) throws HyracksDataException, IOException {
         SortedSet<CheckTuple> tmpMemIndex = new TreeSet<>();
         // First generate the expected index by inserting the documents one-by-one.
         for (int i = 0; i < numDocs; i++) {
@@ -226,7 +225,7 @@
     }
 
     public static void insertIntoInvIndex(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numDocs)
-            throws IOException, IndexException {
+            throws IOException {
         // InMemoryInvertedIndex only supports insert.
         for (int i = 0; i < numDocs; i++) {
             ITupleReference tuple = tupleGen.next();
@@ -236,7 +235,7 @@
     }
 
     public static void deleteFromInvIndex(LSMInvertedIndexTestContext testCtx, Random rnd, int numDocsToDelete)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         List<ITupleReference> documentCorpus = testCtx.getDocumentCorpus();
         for (int i = 0; i < numDocsToDelete && !documentCorpus.isEmpty(); i++) {
             int size = documentCorpus.size();
@@ -254,15 +253,16 @@
      * Compares actual and expected indexes using the rangeSearch() method of the inverted-index accessor.
      */
     public static void compareActualAndExpectedIndexesRangeSearch(LSMInvertedIndexTestContext testCtx)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         IInvertedIndex invIndex = (IInvertedIndex) testCtx.getIndex();
         int tokenFieldCount = invIndex.getTokenTypeTraits().length;
         int invListFieldCount = invIndex.getInvListTypeTraits().length;
-        IInvertedIndexAccessor invIndexAccessor = (IInvertedIndexAccessor) invIndex.createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+        IInvertedIndexAccessor invIndexAccessor = (IInvertedIndexAccessor) invIndex
+                .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         IIndexCursor invIndexCursor = invIndexAccessor.createRangeSearchCursor();
         MultiComparator tokenCmp = MultiComparator.create(invIndex.getTokenCmpFactories());
-        IBinaryComparatorFactory[] tupleCmpFactories = new IBinaryComparatorFactory[tokenFieldCount + invListFieldCount];
+        IBinaryComparatorFactory[] tupleCmpFactories =
+                new IBinaryComparatorFactory[tokenFieldCount + invListFieldCount];
         for (int i = 0; i < tokenFieldCount; i++) {
             tupleCmpFactories[i] = invIndex.getTokenCmpFactories()[i];
         }
@@ -307,7 +307,7 @@
      */
     @SuppressWarnings("unchecked")
     public static void compareActualAndExpectedIndexes(LSMInvertedIndexTestContext testCtx)
-            throws HyracksDataException, IndexException {
+            throws HyracksDataException {
         IInvertedIndex invIndex = (IInvertedIndex) testCtx.getIndex();
         ISerializerDeserializer[] fieldSerdes = testCtx.getFieldSerdes();
         MultiComparator invListCmp = MultiComparator.create(invIndex.getInvListCmpFactories());
@@ -344,8 +344,8 @@
             CheckTuple checkHighKey = new CheckTuple(tokenFieldCount, tokenFieldCount);
             checkHighKey.appendField(token);
 
-            SortedSet<CheckTuple> expectedInvList = OrderedIndexTestUtils.getPrefixExpectedSubset(
-                    testCtx.getCheckTuples(), checkLowKey, checkHighKey);
+            SortedSet<CheckTuple> expectedInvList =
+                    OrderedIndexTestUtils.getPrefixExpectedSubset(testCtx.getCheckTuples(), checkLowKey, checkHighKey);
             Iterator<CheckTuple> expectedInvListIter = expectedInvList.iterator();
 
             // Position inverted-list cursor in actual index.
@@ -439,7 +439,8 @@
             IToken token = tokenizer.getToken();
             tokenData.reset();
             token.serializeToken(tokenData);
-            ByteArrayInputStream inStream = new ByteArrayInputStream(tokenData.getByteArray(), 0, tokenData.getLength());
+            ByteArrayInputStream inStream =
+                    new ByteArrayInputStream(tokenData.getByteArray(), 0, tokenData.getLength());
             DataInput dataIn = new DataInputStream(inStream);
             Comparable tokenObj = (Comparable) tokenSerde.deserialize(dataIn);
             CheckTuple lowKey;
@@ -487,10 +488,10 @@
 
     public static void testIndexSearch(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, Random rnd,
             int numDocQueries, int numRandomQueries, IInvertedIndexSearchModifier searchModifier, int[] scanCountArray)
-            throws IOException, IndexException {
+            throws IOException, HyracksDataException {
         IInvertedIndex invIndex = testCtx.invIndex;
-        IInvertedIndexAccessor accessor = (IInvertedIndexAccessor) invIndex.createAccessor(
-                NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+        IInvertedIndexAccessor accessor = (IInvertedIndexAccessor) invIndex
+                .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         IBinaryTokenizer tokenizer = testCtx.getTokenizerFactory().createTokenizer();
         InvertedIndexSearchPredicate searchPred = new InvertedIndexSearchPredicate(tokenizer, searchModifier);
         List<ITupleReference> documentCorpus = testCtx.getDocumentCorpus();
@@ -519,9 +520,13 @@
             boolean panic = false;
             try {
                 accessor.search(resultCursor, searchPred);
-            } catch (OccurrenceThresholdPanicException e) {
+            } catch (HyracksDataException e) {
                 // ignore panic queries.
-                panic = true;
+                if (e.getErrorCode() == ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION) {
+                    panic = true;
+                } else {
+                    throw e;
+                }
             }
 
             try {
@@ -532,12 +537,17 @@
                         while (resultCursor.hasNext()) {
                             resultCursor.next();
                             ITupleReference resultTuple = resultCursor.getTuple();
-                            int actual = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(0));
+                            int actual = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+                                    resultTuple.getFieldStart(0));
                             actualResults.add(Integer.valueOf(actual));
                         }
-                    } catch (OccurrenceThresholdPanicException e) {
-                        // Ignore panic queries.
-                        continue;
+                    } catch (HyracksDataException e) {
+                        if (e.getErrorCode() == ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION) {
+                            // Ignore panic queries.
+                            continue;
+                        } else {
+                            throw e;
+                        }
                     }
                     Collections.sort(actualResults);
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java
index 1c47373..befdf07 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java
@@ -21,11 +21,11 @@
 
 import java.util.logging.Level;
 
-import org.junit.Test;
-
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -35,11 +35,11 @@
 import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import org.apache.hyracks.storage.am.rtree.AbstractRTreeExamplesTest;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
 import org.apache.hyracks.storage.am.rtree.util.RTreeUtils;
+import org.junit.Test;
 
 public abstract class AbstractLSMRTreeExamplesTest extends AbstractRTreeExamplesTest {
 
@@ -62,10 +62,10 @@
         typeTraits[4] = IntegerPointable.TYPE_TRAITS;
         typeTraits[5] = IntegerPointable.TYPE_TRAITS;
         // Declare field serdes.
-        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
+        ISerializerDeserializer[] fieldSerdes =
+                { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                        IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                        IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
 
         // Declare RTree keys.
         int rtreeKeyFieldCount = 4;
@@ -101,17 +101,17 @@
         }
 
         // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
-                rtreeCmpFactories.length, IntegerPointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
 
         int[] rtreeFields = { 0, 1, 2, 3, 4 };
         ITypeTraits[] filterTypeTraits = { IntegerPointable.TYPE_TRAITS };
-        IBinaryComparatorFactory[] filterCmpFactories = { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
+        IBinaryComparatorFactory[] filterCmpFactories =
+                { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
         int[] filterFields = { 5 };
 
-        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
-                valueProviderFactories, RTreePolicyType.RTREE, rtreeFields, btreeFields, filterTypeTraits,
-                filterCmpFactories, filterFields);
+        ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+                RTreePolicyType.RTREE, rtreeFields, btreeFields, filterTypeTraits, filterCmpFactories, filterFields);
         treeIndex.create();
         treeIndex.activate();
 
@@ -121,8 +121,8 @@
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
+        IIndexAccessor indexAccessor =
+                treeIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         int numInserts = 10000;
         for (int i = 0; i < numInserts; i++) {
             int p1x = rnd.nextInt();
@@ -137,7 +137,10 @@
                     Math.max(p1y, p2y), pk, filter);
             try {
                 indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
+            } catch (HyracksDataException e) {
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
         }
         long end = System.currentTimeMillis();
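The insert loop above now tolerates duplicate keys by checking the error code on
HyracksDataException instead of catching the removed TreeIndexException. A minimal
sketch of that pattern, assuming an accessor and tuple prepared as in the test
(IIndexAccessor.insert is the real API; the helper itself is hypothetical):

import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexAccessor;

final class DuplicateTolerantInsert {

    // Randomly generated rectangles can collide, so duplicate keys are
    // expected here; only ErrorCode.DUPLICATE_KEY is swallowed, everything
    // else propagates to fail the test.
    static void insertIgnoringDuplicates(IIndexAccessor accessor, ITupleReference tuple)
            throws HyracksDataException {
        try {
            accessor.insert(tuple);
        } catch (HyracksDataException e) {
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                throw e;
            }
        }
    }
}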
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
index cdc39ba..00cb223 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
@@ -24,7 +24,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
 import org.apache.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -43,17 +42,15 @@
     protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
             IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
             RTreePolicyType rtreePolicyType, int[] rtreeFields, int[] btreeFields, ITypeTraits[] filterTypeTraits,
-            IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws TreeIndexException,
-            HyracksDataException {
-        return LSMRTreeUtils.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(), harness
-                .getFileReference(),
-                harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits, rtreeCmpFactories,
-                btreeCmpFactories, valueProviderFactories, rtreePolicyType, harness.getBoomFilterFalsePositiveRate(),
-                harness.getMergePolicy(), harness.getOperationTracker(), harness.getIOScheduler(),
-                harness.getIOOperationCallback(),
+            IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws HyracksDataException {
+        return LSMRTreeUtils.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(),
+                harness.getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
+                rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
+                harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(), harness.getOperationTracker(),
+                harness.getIOScheduler(), harness.getIOOperationCallback(),
                 LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length), rtreeFields, btreeFields,
-                filterTypeTraits, filterCmpFactories, filterFields, true, false, harness
-                        .getMetadataPageManagerFactory());
+                filterTypeTraits, filterCmpFactories, filterFields, true, false,
+                harness.getMetadataPageManagerFactory());
     }
 
     @Before
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
index 6ce70f7..835ae64 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
@@ -24,7 +24,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
 import org.apache.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -43,15 +42,14 @@
     protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
             IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
             RTreePolicyType rtreePolicyType, int[] rtreeFields, int[] btreeFields, ITypeTraits[] filterTypeTraits,
-            IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws TreeIndexException,
-            HyracksDataException {
+            IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws HyracksDataException {
         return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(harness.getIOManager(), harness.getVirtualBufferCaches(),
                 harness.getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
                 rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType, harness.getMergePolicy(),
                 harness.getOperationTracker(), harness.getIOScheduler(), harness.getIOOperationCallback(),
                 LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length), rtreeFields,
-                filterTypeTraits, filterCmpFactories, filterFields, true, false, harness
-                        .getMetadataPageManagerFactory());
+                filterTypeTraits, filterCmpFactories, filterFields, true, false,
+                harness.getMetadataPageManagerFactory());
     }
 
     @Before
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/AbstractLSMRTreeTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/AbstractLSMRTreeTestWorker.java
index 06501b4..406e0f8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/AbstractLSMRTreeTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/AbstractLSMRTreeTestWorker.java
@@ -28,7 +28,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 
@@ -78,7 +77,7 @@
         rearrangedTuple.reset(rearrangedTb.getFieldEndOffsets(), rearrangedTb.getByteArray());
     }
 
-    protected void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException, IndexException {
+    protected void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException {
         try {
             while (cursor.hasNext()) {
                 cursor.next();
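This worker's cursor-draining helper now declares only HyracksDataException, since
the index cursors no longer throw the removed IndexException. A sketch of the
simplified loop under that assumption (the drain helper is hypothetical; the
cursor type is the one imported in the file above):

import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;

final class CursorDrain {

    // Consumes every tuple and always closes the cursor; after the exception
    // consolidation a single checked exception type covers the whole loop.
    static int drain(ITreeIndexCursor cursor) throws HyracksDataException {
        int count = 0;
        try {
            while (cursor.hasNext()) {
                cursor.next();
                count++;
            }
        } finally {
            cursor.close();
        }
        return count;
    }
}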
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
index d6f4fcc..e319d1d 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
@@ -29,7 +29,6 @@
 import org.apache.hyracks.storage.am.common.TestWorkloadConf;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;
 import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
 import org.apache.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
@@ -60,14 +59,13 @@
     @Override
     protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
             IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
-            RTreePolicyType rtreePolicyType, int[] btreeFields) throws TreeIndexException, HyracksDataException {
-        return LSMRTreeUtils.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(), harness
-                .getFileReference(),
-                harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits, rtreeCmpFactories,
-                btreeCmpFactories, valueProviderFactories, rtreePolicyType, harness.getBoomFilterFalsePositiveRate(),
-                harness.getMergePolicy(), harness.getOperationTracker(), harness.getIOScheduler(),
-                harness.getIOOperationCallback(), LSMRTreeUtils.proposeBestLinearizer(typeTraits,
-                        rtreeCmpFactories.length), null, btreeFields, null,
+            RTreePolicyType rtreePolicyType, int[] btreeFields) throws HyracksDataException {
+        return LSMRTreeUtils.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(),
+                harness.getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
+                rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
+                harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(), harness.getOperationTracker(),
+                harness.getIOScheduler(), harness.getIOOperationCallback(),
+                LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length), null, btreeFields, null,
                 null, null, true, false, harness.getMetadataPageManagerFactory());
     }
 
@@ -101,14 +99,14 @@
                 new TestWorkloadConf(insertDeleteOps, ProbabilityHelper.getUniformProbDist(insertDeleteOps.length)));
 
         // Inserts, deletes and merges.
-        TestOperation[] insertDeleteMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.MERGE };
+        TestOperation[] insertDeleteMergeOps =
+                new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.MERGE };
         workloadConfs.add(new TestWorkloadConf(insertDeleteMergeOps,
                 ProbabilityHelper.getUniformProbDist(insertDeleteMergeOps.length)));
 
         // All operations except merge.
-        TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.SCAN };
+        TestOperation[] allNoMergeOps =
+                new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.SCAN };
         workloadConfs
                 .add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper.getUniformProbDist(allNoMergeOps.length)));
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java
index 8099582..5f783da 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java
@@ -28,7 +28,6 @@
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
@@ -52,7 +51,7 @@
     }
 
     @Override
-    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
         LSMRTreeAccessor accessor = (LSMRTreeAccessor) indexAccessor;
         ITreeIndexCursor searchCursor = accessor.createSearchCursor(false);
         MultiComparator cmp = accessor.getMultiComparator();
@@ -116,7 +115,7 @@
         rearrangedTuple.reset(rearrangedTb.getFieldEndOffsets(), rearrangedTb.getByteArray());
     }
 
-    private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException, IndexException {
+    private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException {
         try {
             while (cursor.hasNext()) {
                 cursor.next();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
index d72011b..987e015 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
@@ -29,7 +29,6 @@
 import org.apache.hyracks.storage.am.common.TestWorkloadConf;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;
 import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
 import org.apache.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
@@ -41,7 +40,8 @@
 
     private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
 
-    private final LSMRTreeWithAntiMatterTuplesTestWorkerFactory workerFactory = new LSMRTreeWithAntiMatterTuplesTestWorkerFactory();
+    private final LSMRTreeWithAntiMatterTuplesTestWorkerFactory workerFactory =
+            new LSMRTreeWithAntiMatterTuplesTestWorkerFactory();
 
     public LSMRTreeWithAntiMatterTuplesMultiThreadTest() {
         super(false, RTreeType.LSMRTREE_WITH_ANTIMATTER);
@@ -60,7 +60,7 @@
     @Override
     protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
             IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
-            RTreePolicyType rtreePolicyType, int[] btreeFields) throws TreeIndexException, HyracksDataException {
+            RTreePolicyType rtreePolicyType, int[] btreeFields) throws HyracksDataException {
         return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(harness.getIOManager(), harness.getVirtualBufferCaches(),
                 harness.getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
                 rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType, harness.getMergePolicy(),
@@ -99,14 +99,14 @@
                 new TestWorkloadConf(insertDeleteOps, ProbabilityHelper.getUniformProbDist(insertDeleteOps.length)));
 
         // Inserts, deletes and merges.
-        TestOperation[] insertDeleteMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.MERGE };
+        TestOperation[] insertDeleteMergeOps =
+                new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.MERGE };
         workloadConfs.add(new TestWorkloadConf(insertDeleteMergeOps,
                 ProbabilityHelper.getUniformProbDist(insertDeleteMergeOps.length)));
 
         // All operations except merge.
-        TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
-                TestOperation.SCAN };
+        TestOperation[] allNoMergeOps =
+                new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.SCAN };
         workloadConfs
                 .add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper.getUniformProbDist(allNoMergeOps.length)));
 
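The reflowed arrays above define the multithreaded workload mixes: each
TestWorkloadConf pairs an operation set with a probability vector, and
getUniformProbDist(n) presumably weights all n operations equally. A minimal
sketch of building one such mix (types as imported in the file above; the
uniformMix helper is hypothetical):

import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
import org.apache.hyracks.storage.am.common.TestWorkloadConf;
import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;

final class WorkloadMixes {

    // Builds an insert/delete/merge mix in which each operation is chosen
    // with equal probability (1/3 each, given three operations).
    static TestWorkloadConf uniformMix() {
        TestOperation[] ops =
                new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.MERGE };
        return new TestWorkloadConf(ops, ProbabilityHelper.getUniformProbDist(ops.length));
    }
}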
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorker.java
index 4e23e8f..4232207 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorker.java
@@ -25,7 +25,6 @@
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
@@ -35,13 +34,13 @@
 
 public class LSMRTreeWithAntiMatterTuplesTestWorker extends AbstractLSMRTreeTestWorker {
 
-    public LSMRTreeWithAntiMatterTuplesTestWorker(DataGenThread dataGen, TestOperationSelector opSelector,
-            IIndex index, int numBatches) throws HyracksDataException {
+    public LSMRTreeWithAntiMatterTuplesTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index,
+            int numBatches) throws HyracksDataException {
         super(dataGen, opSelector, index, numBatches);
     }
 
     @Override
-    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
         LSMRTreeWithAntiMatterTuplesAccessor accessor = (LSMRTreeWithAntiMatterTuplesAccessor) indexAccessor;
         ITreeIndexCursor searchCursor = accessor.createSearchCursor(false);
         MultiComparator cmp = accessor.getMultiComparator();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeExamplesTest.java
index 30ab9c2..ecc6fe4 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeExamplesTest.java
@@ -24,7 +24,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
 import org.apache.hyracks.storage.am.rtree.util.RTreeUtils;
 import org.apache.hyracks.storage.am.rtree.utils.RTreeTestHarness;
@@ -53,7 +52,7 @@
     protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
             IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
             RTreePolicyType rtreePolicyType, int[] rtreeFields, int[] btreeFields, ITypeTraits[] filterTypeTraits,
-            IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws TreeIndexException {
+            IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws HyracksDataException {
         return RTreeUtils.createRTree(harness.getBufferCache(), harness.getFileMapProvider(), typeTraits,
                 valueProviderFactories, rtreeCmpFactories, rtreePolicyType, harness.getFileReference(), false,
                 harness.getMetadataManagerFactory());
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
index 256c98f..f15be9b 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
@@ -26,6 +26,7 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
@@ -39,7 +40,6 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrameFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
 import org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
@@ -106,16 +106,16 @@
         cmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
 
         // create value providers
-        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
-                .createPrimitiveValueProviderFactories(cmpFactories.length, IntegerPointable.FACTORY);
+        IPrimitiveValueProviderFactory[] valueProviderFactories =
+                RTreeUtils.createPrimitiveValueProviderFactories(cmpFactories.length, IntegerPointable.FACTORY);
 
         RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
         ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 
         ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
                 valueProviderFactories, RTreePolicyType.RTREE, false);
-        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
-                valueProviderFactories, RTreePolicyType.RTREE, false);
+        ITreeIndexFrameFactory leafFrameFactory =
+                new RTreeNSMLeafFrameFactory(tupleWriterFactory, valueProviderFactories, RTreePolicyType.RTREE, false);
 
         IRTreeInteriorFrame interiorFrame = (IRTreeInteriorFrame) interiorFrameFactory.createFrame();
         IRTreeLeafFrame leafFrame = (IRTreeLeafFrame) leafFrameFactory.createFrame();
@@ -128,8 +128,8 @@
 
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         ArrayTupleReference tuple = new ArrayTupleReference();
-        ITreeIndexAccessor indexAccessor = rtree.createAccessor(NoOpOperationCallback.INSTANCE,
-                NoOpOperationCallback.INSTANCE);
+        ITreeIndexAccessor indexAccessor =
+                rtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
         int numInserts = 10000;
         ArrayList<RTreeCheckTuple> checkTuples = new ArrayList<>();
         for (int i = 0; i < numInserts; i++) {
@@ -144,7 +144,10 @@
                     Math.max(p1y, p2y), pk);
             try {
                 indexAccessor.insert(tuple);
-            } catch (TreeIndexException e) {
+            } catch (HyracksDataException e) {
+                if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+                    throw e;
+                }
             }
             RTreeCheckTuple checkTuple = new RTreeCheckTuple(fieldCount, keyFieldCount);
             checkTuple.appendField(Math.min(p1x, p2x));
@@ -165,10 +168,10 @@
         ITreeIndexCursor searchCursor = new RTreeSearchCursor(interiorFrame, leafFrame);
         SearchPredicate searchPredicate = new SearchPredicate(key, cmp);
 
-        RTreeCheckTuple keyCheck = (RTreeCheckTuple) rTreeTestUtils.createCheckTupleFromTuple(key, fieldSerdes,
-                keyFieldCount);
-        HashMultiSet<RTreeCheckTuple> expectedResult = rTreeTestUtils.getRangeSearchExpectedResults(checkTuples,
-                keyCheck);
+        RTreeCheckTuple keyCheck =
+                (RTreeCheckTuple) rTreeTestUtils.createCheckTupleFromTuple(key, fieldSerdes, keyFieldCount);
+        HashMultiSet<RTreeCheckTuple> expectedResult =
+                rTreeTestUtils.getRangeSearchExpectedResults(checkTuples, keyCheck);
 
         rTreeTestUtils.getRangeSearchExpectedResults(checkTuples, keyCheck);
         indexAccessor.search(searchCursor, searchPredicate);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
index c8fec8c..0a27be3 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
@@ -29,7 +29,6 @@
 import org.apache.hyracks.storage.am.common.TestWorkloadConf;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;
 import org.apache.hyracks.storage.am.rtree.AbstractRTreeExamplesTest.RTreeType;
 import org.apache.hyracks.storage.am.rtree.AbstractRTreeMultiThreadTest;
@@ -60,7 +59,7 @@
     @Override
     protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
             IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
-            RTreePolicyType rtreePolicyType, int[] btreeFields) throws TreeIndexException {
+            RTreePolicyType rtreePolicyType, int[] btreeFields) throws HyracksDataException {
         return RTreeUtils.createRTree(harness.getBufferCache(), harness.getFileMapProvider(), typeTraits,
                 valueProviderFactories, rtreeCmpFactories, rtreePolicyType, harness.getFileReference(), false,
                 harness.getMetadataManagerFactory());
@@ -82,8 +81,8 @@
                 .add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper.getUniformProbDist(insertOnlyOps.length)));
 
         // Inserts mixed with scans.
-        TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.SCAN,
-                TestOperation.DISKORDER_SCAN };
+        TestOperation[] insertSearchOnlyOps =
+                new TestOperation[] { TestOperation.INSERT, TestOperation.SCAN, TestOperation.DISKORDER_SCAN };
         workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps,
                 ProbabilityHelper.getUniformProbDist(insertSearchOnlyOps.length)));
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
index ecca1e7..898c051 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
@@ -29,7 +29,6 @@
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.rtree.impls.RTree;
@@ -51,7 +50,7 @@
     }
 
     @Override
-    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+    public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
         RTree.RTreeAccessor accessor = (RTree.RTreeAccessor) indexAccessor;
         IIndexCursor searchCursor = accessor.createSearchCursor(false);
         ITreeIndexCursor diskOrderScanCursor = accessor.createDiskOrderScanCursor();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheRegressionTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheRegressionTest.java
index 69f4a3c..2b4c4c8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheRegressionTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheRegressionTest.java
@@ -42,6 +42,7 @@
 import org.junit.Test;
 
 public class BufferCacheRegressionTest {
+    protected String dirName = "target";
     protected String fileName = "flushTestFile";
     private static final int PAGE_SIZE = 256;
     private static final int HYRACKS_FRAME_SIZE = PAGE_SIZE;
@@ -53,19 +54,22 @@
     // invalidated, but must not be flushed.
     // 2. If the file was not deleted, then we must flush its dirty pages.
     @Before
-    public void setUp() throws IOException{
+    public void setUp() throws IOException {
         resetState();
     }
+
     @After
-    public void tearDown() throws IOException{
+    public void tearDown() throws IOException {
         resetState();
     }
-    private void resetState() throws IOException{
-        File f = new File(fileName);
+
+    private void resetState() throws IOException {
+        File f = new File(dirName, fileName);
         if (f.exists()) {
             f.delete();
         }
     }
+
     @Test
     public void testFlushBehaviorOnFileEviction() throws IOException {
         flushBehaviorTest(true);
@@ -120,8 +124,8 @@
         // physical memory again, and for performance reasons pages are never
         // reset with 0's.
         FileReference testFileRef = ioManager.resolve(fileName);
-        IFileHandle testFileHandle = ioManager.open(testFileRef, FileReadWriteMode.READ_ONLY,
-                FileSyncMode.METADATA_SYNC_DATA_SYNC);
+        IFileHandle testFileHandle =
+                ioManager.open(testFileRef, FileReadWriteMode.READ_ONLY, FileSyncMode.METADATA_SYNC_DATA_SYNC);
         ByteBuffer testBuffer = ByteBuffer.allocate(PAGE_SIZE + BufferCache.RESERVED_HEADER_BYTES);
         ioManager.syncRead(testFileHandle, 0, testBuffer);
         for (int i = BufferCache.RESERVED_HEADER_BYTES; i < testBuffer.capacity(); i++) {