Continue Cleaning Up File References and Splits

1. Make FileSplit an abstract class with two subclasses:
   ManagedFileSplit and UnmanagedFileSplit. A ManagedFileSplit can be
   mapped to a specific IO device through a new subclass,
   MappedFileSplit, which maps a relative path to that device.
   UnmanagedFileSplit is for files outside the IO devices. (A sketch of
   the hierarchy follows this list.)
2. Remove all usages of absolute paths in file splits in test cases.
   The only remaining place that uses an absolute path is the write
   statement.
3. Fix hidden issues in the tests that only worked because absolute
   paths were used.
4. Move the selection of the IO device back to the CC.
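
A minimal sketch of the resulting hierarchy, assuming the constructor
and resolution calls used elsewhere in this change
(ManagedFileSplit(node, path), UnmanagedFileSplit(node, path),
MappedFileSplit(node, path, ioDeviceNum), getFileReference(IIOManager));
field names, visibility, and exception handling here are illustrative,
not the exact implementation:

    // Sketch only: the real classes live in separate files under
    // org.apache.hyracks.api.io.
    package org.apache.hyracks.api.io;

    import java.io.File;
    import java.io.Serializable;

    import org.apache.hyracks.api.exceptions.HyracksDataException;

    abstract class FileSplit implements Serializable {
        private final String node;
        private final String path;

        FileSplit(String node, String path) {
            this.node = node;
            this.path = path;
        }

        public String getNodeName() { return node; }
        public String getPath() { return path; }

        // Resolve this split to a concrete file through the node's IO manager.
        public abstract FileReference getFileReference(IIOManager ioManager)
                throws HyracksDataException;
    }

    // Managed: a relative path resolved against one of the node's IO devices.
    class ManagedFileSplit extends FileSplit {
        ManagedFileSplit(String node, String path) { super(node, path); }

        @Override
        public FileReference getFileReference(IIOManager ioManager)
                throws HyracksDataException {
            return ioManager.resolve(getPath());
        }
    }

    // Mapped: a managed split pinned to a specific IO device (the device is
    // chosen by the CC, per item 4 above).
    class MappedFileSplit extends ManagedFileSplit {
        private final int ioDevice;

        MappedFileSplit(String node, String path, int ioDevice) {
            super(node, path);
            this.ioDevice = ioDevice;
        }

        @Override
        public FileReference getFileReference(IIOManager ioManager)
                throws HyracksDataException {
            return ioManager.getFileReference(ioDevice, getPath());
        }
    }

    // Unmanaged: an absolute path outside the IO devices (e.g. the output
    // file of the write statement).
    class UnmanagedFileSplit extends FileSplit {
        UnmanagedFileSplit(String node, String path) { super(node, path); }

        public File getFile() { return new File(getPath()); }

        @Override
        public FileReference getFileReference(IIOManager ioManager)
                throws HyracksDataException {
            return ioManager.resolveAbsolutePath(getPath());
        }
    }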

Change-Id: I166af8f9b3a2257f94d7b05db94888fb7cb4c79e
Reviewed-on: https://asterix-gerrit.ics.uci.edu/1359
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: abdullah alamoudi <bamousaa@gmail.com>
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index fc982d8..977a7b0 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -157,6 +157,7 @@
 import org.apache.hyracks.algebricks.core.algebra.properties.OrderColumn;
 import org.apache.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 
 /**
  * Each visit returns a pair of an operator and a variable. The variable
@@ -735,7 +736,7 @@
         String filePath = outputDir + System.getProperty("file.separator") + OUTPUT_FILE_PREFIX
                 + outputFileID.incrementAndGet();
         AsterixMetadataProperties metadataProperties = AsterixAppContextInfo.INSTANCE.getMetadataProperties();
-        return new FileSplit(metadataProperties.getMetadataNodeName(), filePath, true);
+        return new ManagedFileSplit(metadataProperties.getMetadataNodeName(), filePath);
     }
 
     @Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
index 7982f12..ac1a5aa 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
@@ -140,8 +140,7 @@
         for (FileSplit split : fileSplits) {
             String ipAddress = nodeMap.get(split.getNodeName()).getNetworkAddress().getAddress().toString();
             String path = split.getPath();
-            boolean relative = split.isManaged();
-            FilePartition partition = new FilePartition(ipAddress, path, relative);
+            FilePartition partition = new FilePartition(ipAddress, path);
             partititons.put(partition.toJSONObject());
         }
         // Generates the response object which contains the splits.
@@ -152,12 +151,10 @@
 class FilePartition {
     private final String ipAddress;
     private final String path;
-    private final boolean relative;
 
-    public FilePartition(String ipAddress, String path, boolean relative) {
+    public FilePartition(String ipAddress, String path) {
         this.ipAddress = ipAddress;
         this.path = path;
-        this.relative = relative;
     }
 
     public String getIPAddress() {
@@ -168,10 +165,6 @@
         return path;
     }
 
-    public boolean isRelative() {
-        return relative;
-    }
-
     @Override
     public String toString() {
         return ipAddress + ":" + path;
@@ -181,7 +174,6 @@
         JSONObject partition = new JSONObject();
         partition.put("ip", ipAddress);
         partition.put("path", path);
-        partition.put("relative", relative);
         return partition;
     }
 }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/FeedOperations.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/FeedOperations.java
index 66a55c7..34746d3 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/FeedOperations.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/FeedOperations.java
@@ -87,7 +87,7 @@
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, ingesterPc);
         spec.connect(new OneToOneConnectorDescriptor(spec), feedIngestor, 0, nullSink, 0);
         spec.addRoot(nullSink);
-        return new Pair<JobSpecification, IAdapterFactory>(spec, adapterFactory);
+        return new Pair<>(spec, adapterFactory);
     }
 
     /**
@@ -131,7 +131,7 @@
             AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
             spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
             spec.addRoot(nullSink);
-            return new Pair<JobSpecification, Boolean>(spec, terminateIntakeJob);
+            return new Pair<>(spec, terminateIntakeJob);
 
         } catch (AlgebricksException e) {
             throw new AsterixException(e);
@@ -146,7 +146,7 @@
                 new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[] {}));
         FeedMessageOperatorDescriptor feedMessenger =
                 new FeedMessageOperatorDescriptor(jobSpec, feedConenctionId, feedMessage);
-        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedMessenger, partitionConstraint);
+        return new Pair<>(feedMessenger, partitionConstraint);
     }
 
     private static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildDisconnectFeedMessengerRuntime(
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/AsterixNCAppRuntimeContext.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/AsterixNCAppRuntimeContext.java
index 9818116..af50b71 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/AsterixNCAppRuntimeContext.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/AsterixNCAppRuntimeContext.java
@@ -80,7 +80,6 @@
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.api.lifecycle.ILifeCycleComponent;
 import org.apache.hyracks.api.lifecycle.ILifeCycleComponentManager;
-import org.apache.hyracks.control.nc.io.IOManager;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
@@ -125,7 +124,7 @@
 
     private ILSMIOOperationScheduler lsmIOScheduler;
     private PersistentLocalResourceRepository localResourceRepository;
-    private IOManager ioManager;
+    private IIOManager ioManager;
     private boolean isShuttingdown;
 
     private ActiveManager activeManager;
@@ -168,7 +167,7 @@
         Logger.getLogger("org.apache.asterix").setLevel(externalProperties.getLogLevel());
         Logger.getLogger("org.apache.hyracks").setLevel(externalProperties.getLogLevel());
 
-        ioManager = (IOManager) ncApplicationContext.getIoManager();
+        ioManager = ncApplicationContext.getIoManager();
         threadExecutor = new AsterixThreadExecutor(ncApplicationContext.getThreadFactory());
         fileMapManager = new AsterixFileMapManager(ioManager);
         ICacheMemoryAllocator allocator = new HeapBufferAllocator();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 36adb1f..7bf5195 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -198,6 +198,7 @@
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.UnmanagedFileSplit;
 import org.apache.hyracks.api.job.JobId;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
@@ -419,7 +420,7 @@
             throws InstantiationException, IllegalAccessException, ClassNotFoundException {
         WriteStatement ws = (WriteStatement) stmt;
         File f = new File(ws.getFileName());
-        FileSplit outputFile = new FileSplit(ws.getNcName().getValue(), f.getPath(), false);
+        FileSplit outputFile = new UnmanagedFileSplit(ws.getNcName().getValue(), f.getPath());
         IAWriterFactory writerFactory = null;
         if (ws.getWriterClassName() != null) {
             writerFactory = (IAWriterFactory) Class.forName(ws.getWriterClassName()).newInstance();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
index 675cb80..f970e50 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
@@ -174,7 +174,7 @@
         FileSplit[] fs = splitsAndConstraint.first.getFileSplits();
         StringBuilder sb = new StringBuilder();
         for (int i = 0; i < fs.length; i++) {
-            sb.append(stringOf(fs[i]) + " ");
+            sb.append(fs[i] + " ");
         }
         LOGGER.info("CREATING File Splits: " + sb.toString());
 
@@ -204,10 +204,6 @@
         return spec;
     }
 
-    private static String stringOf(FileSplit fs) {
-        return fs.getNodeName() + ":" + fs.getPath();
-    }
-
     public static JobSpecification compactDatasetJobSpec(Dataverse dataverse, String datasetName,
             MetadataProvider metadata) throws AsterixException, AlgebricksException {
         String dataverseName = dataverse.getDataverseName();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
index 5d87b00..71b2df5 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
@@ -49,6 +49,7 @@
 import org.apache.hyracks.api.client.NodeControllerInfo;
 import org.apache.hyracks.api.comm.NetworkAddress;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.json.JSONArray;
 import org.json.JSONObject;
 import org.json.JSONTokener;
@@ -123,8 +124,8 @@
         ConnectorAPIServlet servlet = new ConnectorAPIServlet();
         JSONObject actualResponse = new JSONObject();
         FileSplit[] splits = new FileSplit[2];
-        splits[0] = new FileSplit("asterix_nc1", "foo1", true);
-        splits[1] = new FileSplit("asterix_nc2", "foo2", true);
+        splits[0] = new ManagedFileSplit("asterix_nc1", "foo1");
+        splits[1] = new ManagedFileSplit("asterix_nc2", "foo2");
         Map<String, NodeControllerInfo> nodeMap = new HashMap<>();
         NodeControllerInfo mockInfo1 = mock(NodeControllerInfo.class);
         NodeControllerInfo mockInfo2 = mock(NodeControllerInfo.class);
@@ -141,12 +142,9 @@
         // Calls ConnectorAPIServlet.formResponseObject.
         nodeMap.put("asterix_nc1", mockInfo1);
         nodeMap.put("asterix_nc2", mockInfo2);
-        PA.invokeMethod(servlet,
-                "formResponseObject(" + JSONObject.class.getName() + ", " + FileSplit.class.getName() + "[], "
-                        + ARecordType.class.getName() + ", " + String.class.getName() + ", boolean, " + Map.class
-                                .getName() + ")",
-                actualResponse, splits, recordType, primaryKey, true, nodeMap);
-
+        PA.invokeMethod(servlet, "formResponseObject(" + JSONObject.class.getName() + ", " + FileSplit.class.getName()
+                + "[], " + ARecordType.class.getName() + ", " + String.class.getName() + ", boolean, " + Map.class
+                        .getName() + ")", actualResponse, splits, recordType, primaryKey, true, nodeMap);
         // Constructs expected response.
         JSONObject expectedResponse = new JSONObject();
         expectedResponse.put("temp", true);
@@ -156,11 +154,9 @@
         JSONObject element1 = new JSONObject();
         element1.put("ip", "127.0.0.1");
         element1.put("path", splits[0].getPath());
-        element1.put("relative", true);
         JSONObject element2 = new JSONObject();
         element2.put("ip", "127.0.0.2");
         element2.put("path", splits[1].getPath());
-        element2.put("relative", true);
         splitsArray.put(element1);
         splitsArray.put(element2);
         expectedResponse.put("splits", splitsArray);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
index 6ab4b80..e2c6a04 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
@@ -66,6 +66,7 @@
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobId;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.api.util.HyracksConstants;
@@ -250,8 +251,8 @@
     }
 
     public ConstantFileSplitProvider getFileSplitProvider(Dataset dataset) {
-        FileSplit fileSplit = new FileSplit(ExecutionTestUtil.integrationUtil.ncs[0].getId(),
-                dataset.getDataverseName() + File.separator + dataset.getDatasetName(), true);
+        FileSplit fileSplit = new ManagedFileSplit(ExecutionTestUtil.integrationUtil.ncs[0].getId(),
+                dataset.getDataverseName() + File.separator + dataset.getDatasetName());
         return new ConstantFileSplitProvider(new FileSplit[] { fileSplit });
     }
 
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixFileMapManager.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixFileMapManager.java
index b330271..e74659d 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixFileMapManager.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixFileMapManager.java
@@ -23,18 +23,18 @@
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
-import org.apache.hyracks.control.nc.io.IOManager;
+import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.common.file.IFileMapManager;
 
 public class AsterixFileMapManager implements IFileMapManager {
 
     private static final long serialVersionUID = 1L;
-    private final transient IOManager ioManager;
+    private final transient IIOManager ioManager;
     private final Map<Integer, String> id2nameMap = new HashMap<>();
     private final Map<String, Integer> name2IdMap = new HashMap<>();
     private int idCounter = 0;
 
-    public AsterixFileMapManager(IOManager ioManager) {
+    public AsterixFileMapManager(IIOManager ioManager) {
         this.ioManager = ioManager;
     }
 
@@ -44,7 +44,7 @@
         if (fName == null) {
             throw new HyracksDataException("No mapping found for id: " + fileId);
         }
-        return ioManager.getFileRef(fName, false);
+        return ioManager.resolveAbsolutePath(fName);
     }
 
     @Override
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixVirtualBufferCacheProvider.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixVirtualBufferCacheProvider.java
index 685d12d..6fbbfe4 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixVirtualBufferCacheProvider.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixVirtualBufferCacheProvider.java
@@ -46,7 +46,7 @@
         final int partition = ctx.getTaskAttemptId().getTaskId().getPartition();
         IIOManager ioManager = ctx.getIOManager();
         FileSplit fileSplit = fileSplitProvider.getFileSplits()[partition];
-        FileReference fileRef = ioManager.getFileRef(fileSplit.getPath(), fileSplit.isManaged());
+        FileReference fileRef = fileSplit.getFileReference(ioManager);
         IODeviceHandle device = fileRef.getDeviceHandle();
         List<IODeviceHandle> devices = ioManager.getIODevices();
         int deviceId = 0;
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
index a0fe996..fd4a9b5 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
@@ -166,34 +166,34 @@
 
     @Override
     public synchronized void open(String resourcePath) throws HyracksDataException {
-            validateDatasetLifecycleManagerState();
-            int did = getDIDfromResourcePath(resourcePath);
-            long resourceID = getResourceIDfromResourcePath(resourcePath);
+        validateDatasetLifecycleManagerState();
+        int did = getDIDfromResourcePath(resourcePath);
+        long resourceID = getResourceIDfromResourcePath(resourcePath);
 
-            DatasetResource dsr = datasets.get(did);
-            DatasetInfo dsInfo = dsr.getDatasetInfo();
-            if (dsInfo == null || !dsInfo.isRegistered()) {
-                throw new HyracksDataException(
-                        "Failed to open index with resource ID " + resourceID + " since it does not exist.");
+        DatasetResource dsr = datasets.get(did);
+        DatasetInfo dsInfo = dsr.getDatasetInfo();
+        if (dsInfo == null || !dsInfo.isRegistered()) {
+            throw new HyracksDataException(
+                    "Failed to open index with resource ID " + resourceID + " since it does not exist.");
+        }
+
+        IndexInfo iInfo = dsInfo.getIndexes().get(resourceID);
+        if (iInfo == null) {
+            throw new HyracksDataException(
+                    "Failed to open index with resource ID " + resourceID + " since it does not exist.");
+        }
+
+        dsr.open(true);
+        dsr.touch();
+
+        if (!iInfo.isOpen()) {
+            ILSMOperationTracker opTracker = iInfo.getIndex().getOperationTracker();
+            synchronized (opTracker) {
+                iInfo.getIndex().activate();
             }
-
-            IndexInfo iInfo = dsInfo.getIndexes().get(resourceID);
-            if (iInfo == null) {
-                throw new HyracksDataException(
-                        "Failed to open index with resource ID " + resourceID + " since it does not exist.");
-            }
-
-            dsr.open(true);
-            dsr.touch();
-
-            if (!iInfo.isOpen()) {
-                ILSMOperationTracker opTracker = iInfo.getIndex().getOperationTracker();
-                synchronized (opTracker) {
-                    iInfo.getIndex().activate();
-                }
-                iInfo.setOpen(true);
-            }
-            iInfo.touch();
+            iInfo.setOpen(true);
+        }
+        iInfo.touch();
     }
 
     private boolean evictCandidateDataset() throws HyracksDataException {
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/transactions/Resource.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/transactions/Resource.java
index d649782..e781029 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/transactions/Resource.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/transactions/Resource.java
@@ -19,10 +19,13 @@
 package org.apache.asterix.common.transactions;
 
 import java.io.Serializable;
+import java.util.List;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.io.IIOManager;
+import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
 import org.apache.hyracks.storage.common.file.LocalResource;
 
@@ -58,4 +61,15 @@
 
     public abstract ILSMIndex createIndexInstance(IAsterixAppRuntimeContextProvider runtimeContextProvider,
             LocalResource resource) throws HyracksDataException;
+
+    public static int getIoDeviceNum(IIOManager ioManager, IODeviceHandle deviceHandle) {
+        List<IODeviceHandle> ioDevices = ioManager.getIODevices();
+        for (int i = 0; i < ioDevices.size(); i++) {
+            IODeviceHandle device = ioDevices.get(i);
+            if (device == deviceHandle) {
+                return i;
+            }
+        }
+        return -1;
+    }
 }
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
index 5816450..34af5c3 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
@@ -26,6 +26,7 @@
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.MappedFileSplit;
 import org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider;
 import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 import org.apache.log4j.Level;
@@ -50,7 +51,7 @@
     }
 
     public static FileSplit getFileSplitForClusterPartition(ClusterPartition partition, String relativePath) {
-        return new FileSplit(partition.getActiveNodeId(), relativePath, true);
+        return new MappedFileSplit(partition.getActiveNodeId(), relativePath, partition.getIODeviceNum());
     }
 
     public static String prepareStoragePartitionPath(String storageDirName, int partitonId) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
index 7aeb0bc..f877796 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
@@ -39,7 +39,7 @@
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.UnmanagedFileSplit;
 
 public class LocalFSInputStreamFactory implements IInputStreamFactory {
 
@@ -49,7 +49,7 @@
     protected static final Logger LOGGER = Logger.getLogger(LocalFSInputStreamFactory.class.getName());
     protected static INodeResolver nodeResolver;
     protected Map<String, String> configuration;
-    protected FileSplit[] inputFileSplits;
+    protected UnmanagedFileSplit[] inputFileSplits;
     protected boolean isFeed;
     protected String expression;
     // transient fields (They don't need to be serialized and transferred)
@@ -64,7 +64,7 @@
             ArrayList<Path> inputResources = new ArrayList<>();
             for (int i = 0; i < inputFileSplits.length; i++) {
                 if (inputFileSplits[i].getNodeName().equals(nodeName)) {
-                    inputResources.add(inputFileSplits[i].getFile(ctx.getIOManager()).toPath());
+                    inputResources.add(inputFileSplits[i].getFile().toPath());
                 }
             }
             watcher = new FileSystemWatcher(inputResources, expression, isFeed);
@@ -100,9 +100,9 @@
     private void configureFileSplits(String[] splits) throws AsterixException {
         INodeResolver resolver = getNodeResolver();
         if (inputFileSplits == null) {
-            inputFileSplits = new FileSplit[splits.length];
-            String nodeName;
-            String nodeLocalPath;
+            inputFileSplits = new UnmanagedFileSplit[splits.length];
+            String node;
+            String path;
             int count = 0;
             String trimmedValue;
             for (String splitPath : splits) {
@@ -111,10 +111,9 @@
                     throw new AsterixException(
                             "Invalid path: " + splitPath + "\nUsage- path=\"Host://Absolute File Path\"");
                 }
-                nodeName = resolver.resolveNode(trimmedValue.split(":")[0]);
-                nodeLocalPath = trimmedValue.split("://")[1];
-                FileSplit fileSplit = new FileSplit(nodeName, nodeLocalPath, false);
-                inputFileSplits[count++] = fileSplit;
+                node = resolver.resolveNode(trimmedValue.split(":")[0]);
+                path = trimmedValue.split("://")[1];
+                inputFileSplits[count++] = new UnmanagedFileSplit(node, path);
             }
         }
     }
@@ -122,8 +121,7 @@
     private void configurePartitionConstraint() throws AsterixException {
         Set<String> locs = new TreeSet<>();
         for (int i = 0; i < inputFileSplits.length; i++) {
-            String location = inputFileSplits[i].getNodeName();
-            locs.add(location);
+            locs.add(inputFileSplits[i].getNodeName());
         }
         constraints = new AlgebricksAbsolutePartitionConstraint(locs.toArray(new String[locs.size()]));
     }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/AbstractExternalDatasetIndexesOperatorDescriptor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/AbstractExternalDatasetIndexesOperatorDescriptor.java
index 078e4f8..8080f81 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/AbstractExternalDatasetIndexesOperatorDescriptor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/AbstractExternalDatasetIndexesOperatorDescriptor.java
@@ -81,7 +81,7 @@
             public void initialize() throws HyracksDataException {
                 try {
                     FileSplit fileSplit = fileIndexInfo.getFileSplitProvider().getFileSplits()[partition];
-                    FileReference fileRef = fileSplit.getFileRef(ctx.getIOManager());
+                    FileReference fileRef = fileSplit.getFileReference(ctx.getIOManager());
                     // only in partition of device id = 0, we perform the operation on the files index
                     if (fileRef.getDeviceHandle() == ctx.getIOManager().getIODevices().get(0)) {
                         performOpOnIndex(filesIndexDataflowHelperFactory, ctx, fileIndexInfo, partition);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
index 303f76d..2ca9f82 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
@@ -100,7 +100,7 @@
     }
 
     public static FileReference getAbsoluteFileRef(String relativePath, int ioDeviceId, IIOManager ioManager) {
-        return ioManager.getFileRef(ioDeviceId, relativePath);
+        return ioManager.getFileReference(ioDeviceId, relativePath);
     }
 
     public static FeedLogManager getFeedLogManager(IHyracksTaskContext ctx, int partition,
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
index 17ef709..63f53a6 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -341,7 +341,7 @@
         String metadataPartitionPath = StoragePathUtil.prepareStoragePartitionPath(
                 ClusterProperties.INSTANCE.getStorageDirectoryName(), metadataPartition.getPartitionId());
         String resourceName = metadataPartitionPath + File.separator + index.getFileNameRelativePath();
-        FileReference file = ioManager.getFileRef(metadataDeviceId, resourceName);
+        FileReference file = ioManager.getFileReference(metadataDeviceId, resourceName);
 
         // this should not be done this way. dataset lifecycle manager shouldn't return virtual buffer caches for
         // a dataset that was not yet created
@@ -388,7 +388,9 @@
                         + " to intialize as a new instance. (WARNING: all data will be lost.)");
             }
             resourceID = resource.getId();
-            assert (index.getResourceID() == resource.getId());
+            if (index.getResourceID() != resource.getId()) {
+                throw new HyracksDataException("Resource Id doesn't match expected metadata index resource id");
+            }
             lsmBtree = (LSMBTree) dataLifecycleManager.get(file.getRelativePath());
             if (lsmBtree == null) {
                 lsmBtree = LSMBTreeUtils.createLSMTree(ioManager, virtualBufferCaches, file, bufferCache,
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeLocalResourceMetadata.java
index 3083cfe..79f2780 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeLocalResourceMetadata.java
@@ -50,7 +50,7 @@
     public ILSMIndex createIndexInstance(IAsterixAppRuntimeContextProvider runtimeContextProvider,
             LocalResource resource) throws HyracksDataException {
         IIOManager ioManager = runtimeContextProvider.getIOManager();
-        FileReference file = ioManager.getFileRef(resource.getPath(), true);
+        FileReference file = ioManager.resolve(resource.getPath());
         LSMBTree lsmBTree = LSMBTreeUtils.createExternalBTree(ioManager, file, runtimeContextProvider.getBufferCache(),
                 runtimeContextProvider.getFileMapManager(), typeTraits, cmpFactories, bloomFilterKeyFields,
                 runtimeContextProvider.getBloomFilterFalsePositiveRate(),
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeWithBuddyLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeWithBuddyLocalResourceMetadata.java
index 3a486a5..32aa764 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeWithBuddyLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeWithBuddyLocalResourceMetadata.java
@@ -63,7 +63,7 @@
     public ILSMIndex createIndexInstance(IAsterixAppRuntimeContextProvider runtimeContextProvider,
             LocalResource resource) throws HyracksDataException {
         IIOManager ioManager = runtimeContextProvider.getIOManager();
-        FileReference file = ioManager.getFileRef(resource.getPath(), true);
+        FileReference file = ioManager.resolve(resource.getPath());
         return LSMBTreeUtils.createExternalBTreeWithBuddy(ioManager, file, runtimeContextProvider.getBufferCache(),
                 runtimeContextProvider.getFileMapManager(), typeTraits, btreeCmpFactories,
                 runtimeContextProvider.getBloomFilterFalsePositiveRate(),
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java
index 7f6c4a3..e3663fd 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java
@@ -58,7 +58,7 @@
     public ILSMIndex createIndexInstance(IAsterixAppRuntimeContextProvider runtimeContextProvider,
             LocalResource resource) throws HyracksDataException {
         IIOManager ioManager = runtimeContextProvider.getIOManager();
-        FileReference file = ioManager.getFileRef(resource.getPath(), true);
+        FileReference file = ioManager.resolve(resource.getPath());
         try {
             return LSMRTreeUtils.createExternalRTree(ioManager, file, runtimeContextProvider.getBufferCache(),
                     runtimeContextProvider.getFileMapManager(), typeTraits, rtreeCmpFactories, btreeCmpFactories,
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java
index 9c52241..8eeffd2 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java
@@ -18,7 +18,6 @@
  */
 package org.apache.asterix.transaction.management.resource;
 
-import java.util.List;
 import java.util.Map;
 
 import org.apache.asterix.common.api.IDatasetLifecycleManager;
@@ -31,7 +30,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
-import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.storage.am.lsm.btree.impls.LSMBTree;
 import org.apache.hyracks.storage.am.lsm.btree.util.LSMBTreeUtils;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
@@ -76,16 +74,8 @@
     public ILSMIndex createIndexInstance(IAsterixAppRuntimeContextProvider runtimeContextProvider,
             LocalResource resource) throws HyracksDataException {
         IIOManager ioManager = runtimeContextProvider.getIOManager();
-        FileReference file = ioManager.getFileRef(resource.getPath(), true);
-        List<IODeviceHandle> ioDevices = ioManager.getIODevices();
-        int ioDeviceNum = 0;
-        for (int i = 0; i < ioDevices.size(); i++) {
-            IODeviceHandle device = ioDevices.get(i);
-            if (device == file.getDeviceHandle()) {
-                ioDeviceNum = i;
-                break;
-            }
-        }
+        FileReference file = ioManager.resolve(resource.getPath());
+        int ioDeviceNum = Resource.getIoDeviceNum(ioManager, file.getDeviceHandle());
         final IDatasetLifecycleManager datasetLifecycleManager = runtimeContextProvider.getDatasetLifecycleManager();
         LSMBTree lsmBTree = LSMBTreeUtils.createLSMTree(ioManager, datasetLifecycleManager.getVirtualBufferCaches(
                 datasetId(),
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
index 06bf298..fc63bbf 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
@@ -30,7 +30,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
-import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
@@ -80,16 +79,8 @@
     public ILSMIndex createIndexInstance(IAsterixAppRuntimeContextProvider runtimeContextProvider,
             LocalResource resource) throws HyracksDataException {
         IIOManager ioManager = runtimeContextProvider.getIOManager();
-        FileReference file = ioManager.getFileRef(resource.getPath(), true);
-        List<IODeviceHandle> ioDevices = ioManager.getIODevices();
-        int ioDeviceNum = 0;
-        for (int i = 0; i < ioDevices.size(); i++) {
-            IODeviceHandle device = ioDevices.get(i);
-            if (device == file.getDeviceHandle()) {
-                ioDeviceNum = i;
-                break;
-            }
-        }
+        FileReference file = ioManager.resolve(resource.getPath());
+        int ioDeviceNum = Resource.getIoDeviceNum(ioManager, file.getDeviceHandle());
         List<IVirtualBufferCache> virtualBufferCaches = runtimeContextProvider.getDatasetLifecycleManager()
                 .getVirtualBufferCaches(datasetId(), ioDeviceNum);
         try {
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java
index 73c2dd9..7177391 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java
@@ -31,7 +31,6 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
-import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
@@ -81,16 +80,8 @@
     public ILSMIndex createIndexInstance(IAsterixAppRuntimeContextProvider runtimeContextProvider,
             LocalResource resource) throws HyracksDataException {
         IIOManager ioManager = runtimeContextProvider.getIOManager();
-        FileReference file = ioManager.getFileRef(resource.getPath(), true);
-        List<IODeviceHandle> ioDevices = ioManager.getIODevices();
-        int ioDeviceNum = 0;
-        for (int i = 0; i < ioDevices.size(); i++) {
-            IODeviceHandle device = ioDevices.get(i);
-            if (device == file.getDeviceHandle()) {
-                ioDeviceNum = i;
-                break;
-            }
-        }
+        FileReference file = ioManager.resolve(resource.getPath());
+        int ioDeviceNum = Resource.getIoDeviceNum(ioManager, file.getDeviceHandle());
         List<IVirtualBufferCache> virtualBufferCaches = runtimeContextProvider.getDatasetLifecycleManager()
                 .getVirtualBufferCaches(datasetId(), ioDeviceNum);
         try {
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
index 9ebb8e7..8600efb 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
@@ -49,11 +49,11 @@
 import org.apache.commons.io.FileUtils;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
+import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.api.replication.IReplicationJob.ReplicationExecutionType;
 import org.apache.hyracks.api.replication.IReplicationJob.ReplicationJobType;
 import org.apache.hyracks.api.replication.IReplicationJob.ReplicationOperation;
-import org.apache.hyracks.control.nc.io.IOManager;
 import org.apache.hyracks.storage.am.common.frames.LIFOMetaDataFrame;
 import org.apache.hyracks.storage.common.file.ILocalResourceRepository;
 import org.apache.hyracks.storage.common.file.LocalResource;
@@ -74,7 +74,7 @@
     private static final FilenameFilter METADATA_FILES_FILTER = (File dir, String name) -> name.equalsIgnoreCase(
             METADATA_FILE_NAME);
     // Finals
-    private final IOManager ioManager;
+    private final IIOManager ioManager;
     private final String[] mountPoints;
     private final String nodeId;
     private final Cache<String, LocalResource> resourceCache;
@@ -87,7 +87,7 @@
     private IReplicationManager replicationManager;
     private Set<Integer> nodeInactivePartitions;
 
-    public PersistentLocalResourceRepository(IOManager ioManager, List<IODeviceHandle> devices, String nodeId,
+    public PersistentLocalResourceRepository(IIOManager ioManager, List<IODeviceHandle> devices, String nodeId,
             AsterixMetadataProperties metadataProperties) throws HyracksDataException {
         this.ioManager = ioManager;
         mountPoints = new String[devices.size()];
@@ -187,7 +187,7 @@
     @Override
     public synchronized void insert(LocalResource resource) throws HyracksDataException {
         String relativePath = getFileName(resource.getPath(), resource.getId());
-        FileReference resourceFile = ioManager.getFileRef(relativePath, true);
+        FileReference resourceFile = ioManager.resolve(relativePath);
         if (resourceFile.getFile().exists()) {
             throw new HyracksDataException("Duplicate resource: " + resourceFile.getAbsolutePath());
         } else {
@@ -230,10 +230,10 @@
         }
     }
 
-    private static FileReference getLocalResourceFileByName(IOManager ioManager, String resourcePath)
+    private static FileReference getLocalResourceFileByName(IIOManager ioManager, String resourcePath)
             throws HyracksDataException {
         String fileName = resourcePath + File.separator + METADATA_FILE_NAME;
-        return ioManager.getFileRef(fileName, true);
+        return ioManager.resolve(fileName);
     }
 
     public Map<Long, LocalResource> loadAndGetAllResources() throws HyracksDataException {
@@ -420,7 +420,7 @@
      * @param ioDeviceId
      * @return A file reference to the storage metadata file.
      */
-    private static FileReference getStorageMetadataFile(IOManager ioManager, String nodeId,
+    private static FileReference getStorageMetadataFile(IIOManager ioManager, String nodeId,
             int ioDeviceId) {
         String storageMetadataFileName = STORAGE_METADATA_DIRECTORY + File.separator + nodeId + "_" + "iodevice"
                 + ioDeviceId + File.separator
@@ -435,7 +435,7 @@
      * @return A file reference to the storage root directory if exists, otherwise null.
      * @throws HyracksDataException
      */
-    public static File getStorageRootDirectoryIfExists(IOManager ioManager, String nodeId,
+    public static File getStorageRootDirectoryIfExists(IIOManager ioManager, String nodeId,
             int ioDeviceId)
             throws HyracksDataException {
         File storageRootDir = null;
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepositoryFactory.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepositoryFactory.java
index 1c28e17..b46806a 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepositoryFactory.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepositoryFactory.java
@@ -20,16 +20,16 @@
 
 import org.apache.asterix.common.config.AsterixMetadataProperties;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.control.nc.io.IOManager;
+import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.common.file.ILocalResourceRepository;
 import org.apache.hyracks.storage.common.file.ILocalResourceRepositoryFactory;
 
 public class PersistentLocalResourceRepositoryFactory implements ILocalResourceRepositoryFactory {
-    private final IOManager ioManager;
+    private final IIOManager ioManager;
     private final String nodeId;
     private final AsterixMetadataProperties metadataProperties;
 
-    public PersistentLocalResourceRepositoryFactory(IOManager ioManager, String nodeId,
+    public PersistentLocalResourceRepositoryFactory(IIOManager ioManager, String nodeId,
             AsterixMetadataProperties metadataProperties) {
         this.ioManager = ioManager;
         this.nodeId = nodeId;
diff --git a/hyracks-fullstack/algebricks/algebricks-examples/piglet-example/src/main/java/org/apache/hyracks/algebricks/examples/piglet/metadata/FileSplitUtils.java b/hyracks-fullstack/algebricks/algebricks-examples/piglet-example/src/main/java/org/apache/hyracks/algebricks/examples/piglet/metadata/FileSplitUtils.java
index d01bae5..5d9576f 100644
--- a/hyracks-fullstack/algebricks/algebricks-examples/piglet-example/src/main/java/org/apache/hyracks/algebricks/examples/piglet/metadata/FileSplitUtils.java
+++ b/hyracks-fullstack/algebricks/algebricks-examples/piglet-example/src/main/java/org/apache/hyracks/algebricks/examples/piglet/metadata/FileSplitUtils.java
@@ -21,6 +21,7 @@
 import java.io.File;
 
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.UnmanagedFileSplit;
 
 public class FileSplitUtils {
     public static FileSplit[] parseFileSplits(String fileSplits) {
@@ -32,7 +33,7 @@
             if (idx < 0) {
                 throw new IllegalArgumentException("File split " + s + " not well formed");
             }
-            fSplits[i] = new FileSplit(s.substring(0, idx), new File(s.substring(idx + 1)).getAbsolutePath(), false);
+            fSplits[i] = new UnmanagedFileSplit(s.substring(0, idx), new File(s.substring(idx + 1)).getAbsolutePath());
         }
         return fSplits;
     }
diff --git a/hyracks-fullstack/algebricks/algebricks-examples/piglet-example/src/main/java/org/apache/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java b/hyracks-fullstack/algebricks/algebricks-examples/piglet-example/src/main/java/org/apache/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java
index 37906c1..35b433b 100644
--- a/hyracks-fullstack/algebricks/algebricks-examples/piglet-example/src/main/java/org/apache/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java
+++ b/hyracks-fullstack/algebricks/algebricks-examples/piglet-example/src/main/java/org/apache/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java
@@ -147,8 +147,8 @@
         try {
             prf = new SinkWriterRuntimeFactory(printColumns, printerFactories,
                     fileSplits[0].getFile(null), PrinterBasedWriterFactory.INSTANCE, inputDesc);
-        AlgebricksAbsolutePartitionConstraint constraint = new AlgebricksAbsolutePartitionConstraint(locations);
-        return new Pair<>(prf, constraint);
+            AlgebricksAbsolutePartitionConstraint constraint = new AlgebricksAbsolutePartitionConstraint(locations);
+            return new Pair<>(prf, constraint);
         } catch (HyracksDataException e) {
             throw new AlgebricksException(e);
         }
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-part1.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/simple/int-part1.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-part1.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/simple/int-part1.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-part2.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/simple/int-part2.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-part2.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/simple/int-part2.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1-split-0.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/simple/int-string-part1-split-0.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1-split-0.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/simple/int-string-part1-split-0.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1-split-1.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/simple/int-string-part1-split-1.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1-split-1.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/simple/int-string-part1-split-1.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/simple/int-string-part1.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/simple/int-string-part1.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/customer-part1.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/customer-part1.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/customer-part1.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/customer-part1.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/customer.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/customer.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/customer.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/customer.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/lineitem.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/lineitem.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/lineitem.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/lineitem.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/nation.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/nation.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/nation.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/nation.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/orders-part1.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/orders-part1.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/orders-part1.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/orders-part1.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/part.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/part.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/part.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/part.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/partsupp.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/partsupp.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/partsupp.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/partsupp.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/region.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/region.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/region.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/region.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/supplier.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/supplier.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/supplier.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/supplier.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/tpch.ddl b/hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/tpch.ddl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/tpch.ddl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device0/data/tpch0.001/tpch.ddl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/customer-part2.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device1/data/tpch0.001/customer-part2.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/customer-part2.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device1/data/tpch0.001/customer-part2.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/orders-part2.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device1/data/tpch0.001/orders-part2.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/orders-part2.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device1/data/tpch0.001/orders-part2.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/orders.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/device1/data/tpch0.001/orders.tbl
similarity index 100%
rename from hyracks-fullstack/algebricks/algebricks-tests/data/tpch0.001/orders.tbl
rename to hyracks-fullstack/algebricks/algebricks-tests/data/device1/data/tpch0.001/orders.tbl
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/pom.xml b/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
index 52eda0b..f49189d 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
@@ -163,5 +163,9 @@
       <artifactId>hyracks-dataflow-std</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
index af6ddca..34ed142 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
+++ b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
@@ -22,7 +22,9 @@
 import java.io.File;
 import java.io.FileReader;
 import java.io.IOException;
+import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.hyracks.algebricks.data.IPrinterFactory;
 import org.apache.hyracks.algebricks.data.impl.BinaryBooleanInspectorImpl;
 import org.apache.hyracks.algebricks.data.impl.BinaryIntegerInspectorImpl;
@@ -64,8 +66,11 @@
 import org.apache.hyracks.api.dataflow.value.IMissingWriterFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.hyracks.control.nc.NodeControllerService;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
 import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
@@ -100,6 +105,7 @@
     private static final int FRAME_SIZE = 32768;
 
     private static final String[] DEFAULT_NODES = new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID };
+    private static final AtomicInteger aInteger = new AtomicInteger(0);
 
     @BeforeClass
     public static void setUp() throws Exception {
@@ -180,8 +186,8 @@
 
         // the scanner
         FileSplit[] intFileSplits = new FileSplit[1];
-        intFileSplits[0] = new FileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, new File("data/simple/int-part1.tbl")
-                .getAbsolutePath(), false);
+        intFileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
+                + "simple" + File.separator + "int-part1.tbl");
         IFileSplitProvider intSplitProvider = new ConstantFileSplitProvider(intFileSplits);
         RecordDescriptor intScannerDesc = new RecordDescriptor(
                 new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
@@ -263,8 +269,8 @@
 
         // the scanner
         FileSplit[] fileSplits = new FileSplit[1];
-        fileSplits[0] = new FileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, new File("data/tpch0.001/customer.tbl")
-                .getAbsolutePath(), false);
+        fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "customer.tbl");
         IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
 
         RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
@@ -345,8 +351,8 @@
 
         // the scanner
         FileSplit[] fileSplits = new FileSplit[1];
-        fileSplits[0] = new FileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
-                new File("data/tpch0.001/customer-part1.tbl").getAbsolutePath(), false);
+        fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
+                "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl");
         IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
         RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
@@ -397,8 +403,8 @@
 
         // the scanner
         FileSplit[] fileSplits = new FileSplit[1];
-        fileSplits[0] = new FileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
-                new File("data/tpch0.001/customer.tbl").getAbsolutePath(), false);
+        fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "customer.tbl");
         IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
         RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
@@ -575,15 +581,10 @@
 
         JobSpecification spec = new JobSpecification(FRAME_SIZE);
 
-        String inputFileName = "data/tpch0.001/customer.tbl";
-        File inputFile = new File(inputFileName);
-        File[] outputFile = new File[outputArity];
-        for (int i = 0; i < outputArity; i++) {
-            outputFile[i] = File.createTempFile("splitop", null);
-        }
+        String inputFileName = "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl";
 
         FileSplit[] inputSplits = new FileSplit[] {
-                new FileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFile.getAbsolutePath(), false) };
+                new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFileName) };
 
         DelimitedDataTupleParserFactory stringParser = new DelimitedDataTupleParserFactory(
                 new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, '\u0000');
@@ -601,10 +602,12 @@
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, replicateOp,
                 new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
 
-        IOperatorDescriptor outputOp[] = new IOperatorDescriptor[outputFile.length];
+        IOperatorDescriptor outputOp[] = new IOperatorDescriptor[outputArity];
+        File[] outputFile = new File[outputArity];
         for (int i = 0; i < outputArity; i++) {
-            outputOp[i] = new LineFileWriteOperatorDescriptor(spec, new FileSplit[] {
-                    new FileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, outputFile[i].getAbsolutePath(), false) });
+            FileSplit fileSplit = createFile(AlgebricksHyracksIntegrationUtil.nc1);
+            outputFile[i] = fileSplit.getFile(AlgebricksHyracksIntegrationUtil.nc1.getIoManager());
+            outputOp[i] = new LineFileWriteOperatorDescriptor(spec, new FileSplit[] { fileSplit });
             PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, outputOp[i],
                     new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
         }
@@ -620,29 +623,41 @@
         AlgebricksHyracksIntegrationUtil.runJob(spec);
 
         for (int i = 0; i < outputArity; i++) {
-            compareFiles(inputFileName, outputFile[i].getAbsolutePath());
+            compareFiles("data" + File.separator + "device0" + File.separator + inputFileName, outputFile[i]
+                    .getAbsolutePath());
         }
     }
 
+    public FileSplit createFile(NodeControllerService ncs) throws IOException {
+        String fileName = "f" + aInteger.getAndIncrement() + ".tmp";
+        FileReference fileRef = ncs.getIoManager().getFileReference(0, fileName);
+        FileUtils.deleteQuietly(fileRef.getFile());
+        fileRef.getFile().createNewFile();
+        return new ManagedFileSplit(ncs.getId(), fileName);
+    }
+
     @Test
     public void scanSplitWrite() throws Exception {
         final int outputArity = 2;
 
         JobSpecification spec = new JobSpecification(FRAME_SIZE);
 
-        String inputFileName[] = { "data/simple/int-string-part1.tbl", "data/simple/int-string-part1-split-0.tbl",
-                "data/simple/int-string-part1-split-1.tbl" };
+        String inputFileName[] = { "data" + File.separator + "simple" + File.separator + "int-string-part1.tbl", "data"
+                + File.separator + "simple" + File.separator + "int-string-part1-split-0.tbl",
+                "data" + File.separator + "simple" + File.separator + "int-string-part1-split-1.tbl" };
         File[] inputFiles = new File[inputFileName.length];
-        for (int i=0; i<inputFileName.length; i++) {
+        for (int i = 0; i < inputFileName.length; i++) {
             inputFiles[i] = new File(inputFileName[i]);
         }
         File[] outputFile = new File[outputArity];
+        FileSplit[] outputFileSplit = new FileSplit[outputArity];
         for (int i = 0; i < outputArity; i++) {
-            outputFile[i] = File.createTempFile("splitop", null);
+            outputFileSplit[i] = createFile(AlgebricksHyracksIntegrationUtil.nc1);
+            outputFile[i] = outputFileSplit[i].getFile(AlgebricksHyracksIntegrationUtil.nc1.getIoManager());
         }
 
         FileSplit[] inputSplits = new FileSplit[] {
-                new FileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFiles[0].getAbsolutePath(), false) };
+                new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFileName[0]) };
         IFileSplitProvider intSplitProvider = new ConstantFileSplitProvider(inputSplits);
 
         RecordDescriptor scannerDesc = new RecordDescriptor(
@@ -665,8 +680,7 @@
 
         IOperatorDescriptor outputOp[] = new IOperatorDescriptor[outputFile.length];
         for (int i = 0; i < outputArity; i++) {
-            outputOp[i] = new LineFileWriteOperatorDescriptor(spec, new FileSplit[] {
-                    new FileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, outputFile[i].getAbsolutePath(), false) });
+            outputOp[i] = new LineFileWriteOperatorDescriptor(spec, new FileSplit[] { outputFileSplit[i] });
             PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, outputOp[i],
                     new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
         }
@@ -682,7 +696,8 @@
         AlgebricksHyracksIntegrationUtil.runJob(spec);
 
         for (int i = 0; i < outputArity; i++) {
-            compareFiles(inputFileName[i + 1], outputFile[i].getAbsolutePath());
+            compareFiles("data" + File.separator + "device0" + File.separator + inputFileName[i + 1], outputFile[i]
+                    .getAbsolutePath());
         }
     }
 
@@ -692,8 +707,8 @@
 
         // the scanner
         FileSplit[] fileSplits = new FileSplit[1];
-        fileSplits[0] = new FileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, new File("data/tpch0.001/nation.tbl")
-                .getAbsolutePath(), false);
+        fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "nation.tbl");
         IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
         RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
@@ -801,8 +816,8 @@
 
         // the scanner
         FileSplit[] fileSplits = new FileSplit[1];
-        fileSplits[0] = new FileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
-                new File("data/tpch0.001/customer.tbl").getAbsolutePath(), false);
+        fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
+                "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl");
         IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
         RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
index 17e1589..9880047 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
+++ b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
@@ -18,8 +18,10 @@
  */
 package org.apache.hyracks.algebricks.tests.util;
 
+import java.io.File;
 import java.util.EnumSet;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.hyracks.algebricks.core.config.AlgebricksConfig;
 import org.apache.hyracks.api.client.HyracksConnection;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
@@ -40,11 +42,13 @@
     public static final int TEST_HYRACKS_CC_CLIENT_NET_PORT = 4321;
 
     private static ClusterControllerService cc;
-    private static NodeControllerService nc1;
-    private static NodeControllerService nc2;
+    public static NodeControllerService nc1;
+    public static NodeControllerService nc2;
     private static IHyracksClientConnection hcc;
 
     public static void init() throws Exception {
+        FileUtils.deleteQuietly(new File("target" + File.separator + "data"));
+        FileUtils.copyDirectory(new File("data"), new File("target" + File.separator + "data"));
         CCConfig ccConfig = new CCConfig();
         ccConfig.clientNetIpAddress = "127.0.0.1";
         ccConfig.clientNetPort = TEST_HYRACKS_CC_CLIENT_NET_PORT;
@@ -60,6 +64,9 @@
         ncConfig1.dataIPAddress = "127.0.0.1";
         ncConfig1.resultIPAddress = "127.0.0.1";
         ncConfig1.nodeId = NC1_ID;
+        ncConfig1.ioDevices = System.getProperty("user.dir") + File.separator + "target" + File.separator + "data"
+                + File.separator + "device0";
+        FileUtils.forceMkdir(new File(ncConfig1.ioDevices));
         nc1 = new NodeControllerService(ncConfig1);
         nc1.start();
 
@@ -70,6 +77,9 @@
         ncConfig2.dataIPAddress = "127.0.0.1";
         ncConfig2.resultIPAddress = "127.0.0.1";
         ncConfig2.nodeId = NC2_ID;
+        ncConfig2.ioDevices = System.getProperty("user.dir") + File.separator + "target" + File.separator + "data"
+                + File.separator + "device1";
+        FileUtils.forceMkdir(new File(ncConfig2.ioDevices));
         nc2 = new NodeControllerService(ncConfig2);
         nc2.start();
 
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/FileSplit.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/FileSplit.java
index bd4567b..4754731 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/FileSplit.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/FileSplit.java
@@ -24,31 +24,24 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 /**
- * A node and a path (Can be relative: inside the IO device or absolute inside or outside IO devices)
+ * A node and a path (either managed, i.e. relative to an IO device, or unmanaged, i.e. an absolute path inside or outside the IO devices)
  * Used to identify a file/dir across the cluster.
  */
-public class FileSplit implements Serializable {
+public abstract class FileSplit implements Serializable {
+
     private static final long serialVersionUID = 1L;
     private final String node;
     private final String path;
-    private final boolean managed;
 
     /**
      * Constructor
      *
      * @param node
      * @param path
-     * @param maanged
      */
-    public FileSplit(String node, String path, boolean relative) {
+    protected FileSplit(String node, String path) {
         this.node = node;
         this.path = path;
-        this.managed = relative;
-    }
-
-    @Override
-    public String toString() {
-        return "Node: " + node + " " + (managed ? "managed" : "absolute") + " path: " + path;
     }
 
     /**
@@ -59,33 +52,13 @@
     }
 
     /**
-     * @return true if relative
-     */
-    public boolean isManaged() {
-        return managed;
-    }
-
-    /**
      * Get the local file represented by this split
      *
      * @param ioManager
      * @return
      * @throws HyracksDataException
      */
-    public File getFile(IIOManager ioManager) throws HyracksDataException {
-        return managed ? getFileRef(ioManager).getFile() : new File(path);
-    }
-
-    /**
-     * Get the file reference for the split
-     *
-     * @param ioManager
-     * @return
-     * @throws HyracksDataException
-     */
-    public FileReference getFileRef(IIOManager ioManager) throws HyracksDataException {
-        return ioManager.getFileRef(path, managed);
-    }
+    public abstract File getFile(IIOManager ioManager) throws HyracksDataException;
 
     /**
      * @return the node
@@ -93,4 +66,8 @@
     public String getNodeName() {
         return node;
     }
+
+    // TODO(amoudi): This method should be removed from this class and moved to ManagedFileSplit since it is only
+    // applicable for that subclass
+    public abstract FileReference getFileReference(IIOManager ioManager) throws HyracksDataException;
 }
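
For context, a minimal sketch (assuming the abstract class above and its subclasses introduced later in this change) of how a call site migrates from the removed three-argument FileSplit constructor; the node id "nc1" and both paths are hypothetical, not taken from this patch.

    import java.io.File;

    import org.apache.hyracks.api.io.FileSplit;
    import org.apache.hyracks.api.io.ManagedFileSplit;
    import org.apache.hyracks.api.io.UnmanagedFileSplit;

    public class FileSplitMigrationSketch {
        public static void main(String[] args) {
            // Before: new FileSplit("nc1", "data/customer.tbl", true)
            // A managed split keeps a path relative to one of the node's IO devices.
            FileSplit managed = new ManagedFileSplit("nc1", "data" + File.separator + "customer.tbl");

            // Before: new FileSplit("nc1", "/tmp/out.tbl", false)
            // An unmanaged split carries an absolute path outside the IO devices.
            FileSplit unmanaged = new UnmanagedFileSplit("nc1", "/tmp/out.tbl");

            System.out.println(managed);   // Node: nc1 managed path: data/customer.tbl
            System.out.println(unmanaged); // Node: nc1 unmanaged path: /tmp/out.tbl
        }
    }
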
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IIOManager.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IIOManager.java
index 0b3f71a..50ef4f0 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IIOManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IIOManager.java
@@ -63,18 +63,36 @@
 
     /**
      * @param ioDeviceId
-     * @param relativePath
+     * @param path
      * @return A file reference based on the mounting point of {@code ioDeviceId} and the passed {@code relativePath}
      */
-    public FileReference getFileRef(int ioDeviceId, String relativePath);
+    FileReference getFileReference(int ioDeviceId, String path);
 
     /**
-     * A file reference based on the mounting point of {@code ioDeviceId} and the passed {@code relativePath}
+     * Determines which IO device holds the given path and returns a FileReference based on that device
      *
      * @param path
-     * @param relative
-     * @return
+     * @return A file reference based on the mounting point of the IO device that holds the passed {@code path}
      * @throws HyracksDataException
      */
-    public FileReference getFileRef(String path, boolean relative) throws HyracksDataException;
+    FileReference resolve(String path) throws HyracksDataException;
+
+    /**
+     * Gets a file reference from an absolute path
+     * @deprecated
+     *             use getFileReference(int ioDeviceId, String path) instead
+     * @param path
+     * @return A file reference for the passed absolute {@code path}, resolved against the IO device that contains it
+     * @throws HyracksDataException
+     */
+    @Deprecated
+    FileReference resolveAbsolutePath(String path) throws HyracksDataException;
+
+    /**
+     * Create a workspace file with the given prefix
+     * @param prefix
+     * @return A FileReference for the created workspace file
+     * @throws HyracksDataException
+     */
+    FileReference createWorkspaceFile(String prefix) throws HyracksDataException;
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/ManagedFileSplit.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/ManagedFileSplit.java
new file mode 100644
index 0000000..511e47f
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/ManagedFileSplit.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.api.io;
+
+import java.io.File;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * A node and a managed path inside an IO device.
+ */
+public class ManagedFileSplit extends FileSplit {
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Constructor
+     *
+     * @param node
+     * @param path
+     */
+    public ManagedFileSplit(String node, String path) {
+        super(node, path);
+    }
+
+    @Override
+    public String toString() {
+        return "Node: " + getNodeName() + " managed path: " + getPath();
+    }
+
+    /**
+     * Get the local file represented by this split
+     *
+     * @param ioManager
+     * @return
+     * @throws HyracksDataException
+     */
+    @Override
+    public File getFile(IIOManager ioManager) throws HyracksDataException {
+        return getFileReference(ioManager).getFile();
+    }
+
+    /**
+     * Get the file reference for the split
+     *
+     * @param ioManager
+     * @return
+     * @throws HyracksDataException
+     */
+    @Override
+    public FileReference getFileReference(IIOManager ioManager) throws HyracksDataException {
+        return ioManager.resolve(getPath());
+    }
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/MappedFileSplit.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/MappedFileSplit.java
new file mode 100644
index 0000000..0a0e026
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/MappedFileSplit.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.api.io;
+
+import java.io.File;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * A FileSplit that is mapped to a specific IO device
+ */
+public class MappedFileSplit extends ManagedFileSplit {
+
+    private static final long serialVersionUID = 1L;
+    private final int ioDevice;
+    private transient FileReference cached;
+
+    /**
+     * Construct a managed file split that is mapped to a specific IO device
+     * @param node
+     * @param path
+     * @param ioDevice
+     */
+    public MappedFileSplit(String node, String path, int ioDevice) {
+        super(node, path);
+        this.ioDevice = ioDevice;
+    }
+
+    public int getIoDevice() {
+        return ioDevice;
+    }
+
+    @Override
+    public String toString() {
+        return "Node: " + getNodeName() + " IO Device: " + ioDevice + " managed path: " + getPath();
+    }
+
+    @Override
+    public File getFile(IIOManager ioManager) throws HyracksDataException {
+        return getFileReference(ioManager).getFile();
+    }
+
+    @Override
+    public FileReference getFileReference(IIOManager ioManager) throws HyracksDataException {
+        if (cached == null) {
+            cached = new FileReference(ioManager.getIODevices().get(ioDevice), getPath());
+        }
+        return cached;
+    }
+}
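
A sketch of constructing the new MappedFileSplit, which pins a managed path to one IO device index so resolution skips the device computer; the node id, path, and device index are hypothetical.

    import org.apache.hyracks.api.io.FileSplit;
    import org.apache.hyracks.api.io.MappedFileSplit;

    public class MappedFileSplitSketch {
        public static void main(String[] args) {
            // Relative path "storage/ds1/p0" mapped to IO device 1 on node "nc2".
            FileSplit split = new MappedFileSplit("nc2", "storage/ds1/p0", 1);
            System.out.println(split); // Node: nc2 IO Device: 1 managed path: storage/ds1/p0
        }
    }
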
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/UnmanagedFileSplit.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/UnmanagedFileSplit.java
new file mode 100644
index 0000000..9a6f434
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/UnmanagedFileSplit.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.api.io;
+
+import java.io.File;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * A FileSplit for files that are not managed by any IO device
+ */
+public class UnmanagedFileSplit extends FileSplit {
+
+    private static final long serialVersionUID = 1L;
+
+    public UnmanagedFileSplit(String node, String path) {
+        super(node, path);
+    }
+
+    @Override
+    public String toString() {
+        return "Node: " + getNodeName() + " unmanaged path: " + getPath();
+    }
+
+    /**
+     * Get the local file represented by this split
+     *
+     * @param ioManager
+     * @return
+     * @throws HyracksDataException
+     */
+    @Override
+    public File getFile(IIOManager ioManager) {
+        return new File(getPath());
+    }
+
+    /**
+     * Get the local file represented by this unmanaged file split
+     *
+     * @return
+     */
+    public File getFile() {
+        return new File(getPath());
+    }
+
+    @Override
+    public FileReference getFileReference(IIOManager ioManager) throws HyracksDataException {
+        throw new HyracksDataException("FileReference is only for files inside an IO device");
+    }
+}
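
A sketch of the unmanaged case, e.g. the target of a write statement outside the IO devices; the path is hypothetical.

    import java.io.File;

    import org.apache.hyracks.api.io.UnmanagedFileSplit;

    public class UnmanagedFileSplitSketch {
        public static void main(String[] args) {
            UnmanagedFileSplit split = new UnmanagedFileSplit("nc1", "/tmp/query_result.adm");
            // No IO manager is needed to materialize the local file.
            File f = split.getFile();
            System.out.println(f.getAbsolutePath());
            // Calling split.getFileReference(ioManager) would throw HyracksDataException,
            // because unmanaged paths are not backed by an IO device.
        }
    }
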
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
index 98ae7c4..25ab245 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
@@ -56,7 +56,6 @@
 import org.apache.hyracks.control.common.job.profiling.counters.Counter;
 import org.apache.hyracks.control.common.job.profiling.om.JobletProfile;
 import org.apache.hyracks.control.common.job.profiling.om.TaskProfile;
-import org.apache.hyracks.control.nc.io.IOManager;
 import org.apache.hyracks.control.nc.io.WorkspaceFileFactory;
 import org.apache.hyracks.control.nc.resources.DefaultDeallocatableRegistry;
 import org.apache.hyracks.control.nc.resources.memory.FrameManager;
@@ -115,7 +114,7 @@
         taskMap = new HashMap<>();
         counterMap = new HashMap<>();
         deallocatableRegistry = new DefaultDeallocatableRegistry();
-        fileFactory = new WorkspaceFileFactory(this, (IOManager) appCtx.getIoManager());
+        fileFactory = new WorkspaceFileFactory(this, appCtx.getIoManager());
         cleanupPending = false;
         IJobletEventListenerFactory jelf = acg.getJobletEventListenerFactory();
         if (jelf != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
index 598110a..f00303f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
@@ -28,11 +28,9 @@
 import java.lang.management.ThreadMXBean;
 import java.lang.reflect.Field;
 import java.net.InetSocketAddress;
-import java.util.ArrayList;
 import java.util.Hashtable;
 import java.util.List;
 import java.util.Map;
-import java.util.StringTokenizer;
 import java.util.Timer;
 import java.util.TimerTask;
 import java.util.concurrent.ExecutorService;
@@ -157,7 +155,7 @@
                 new NodeControllerIPCI(this),
                 new CCNCFunctions.SerializerDeserializer());
 
-        ioManager = new IOManager(getDevices(ncConfig.ioDevices));
+        ioManager = new IOManager(IODeviceHandle.getDevices(ncConfig.ioDevices));
         if (id == null) {
             throw new Exception("id not set");
         }
@@ -195,16 +193,6 @@
         return lccm;
     }
 
-    private static List<IODeviceHandle> getDevices(String ioDevices) {
-        List<IODeviceHandle> devices = new ArrayList<>();
-        StringTokenizer tok = new StringTokenizer(ioDevices, ",");
-        while (tok.hasMoreElements()) {
-            String devPath = tok.nextToken().trim();
-            devices.add(new IODeviceHandle(new File(devPath), "."));
-        }
-        return devices;
-    }
-
     synchronized void setNodeRegistrationResult(NodeParameters parameters, Exception exception) {
         this.nodeParameters = parameters;
         this.registrationException = exception;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
index 7c626c1..71beab4 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
@@ -58,7 +58,6 @@
 import org.apache.hyracks.control.common.job.profiling.om.PartitionProfile;
 import org.apache.hyracks.control.common.job.profiling.om.TaskProfile;
 import org.apache.hyracks.control.common.utils.ExceptionUtils;
-import org.apache.hyracks.control.nc.io.IOManager;
 import org.apache.hyracks.control.nc.io.WorkspaceFileFactory;
 import org.apache.hyracks.control.nc.resources.DefaultDeallocatableRegistry;
 import org.apache.hyracks.control.nc.work.NotifyTaskCompleteWork;
@@ -105,12 +104,12 @@
         this.taskAttemptId = taskId;
         this.displayName = displayName;
         this.executorService = executor;
-        fileFactory = new WorkspaceFileFactory(this, (IOManager) joblet.getIOManager());
+        fileFactory = new WorkspaceFileFactory(this, joblet.getIOManager());
         deallocatableRegistry = new DefaultDeallocatableRegistry();
-        counterMap = new HashMap<String, Counter>();
+        counterMap = new HashMap<>();
         opEnv = joblet.getEnvironment();
-        partitionSendProfile = new Hashtable<PartitionId, PartitionProfile>();
-        pendingThreads = new LinkedHashSet<Thread>();
+        partitionSendProfile = new Hashtable<>();
+        pendingThreads = new LinkedHashSet<>();
         exceptions = new ArrayList<>();
         this.ncs = ncs;
         this.inputChannelsFromConnectors = inputChannelsFromConnectors;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/IOManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/IOManager.java
index bdb283a..80bb662 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/IOManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/IOManager.java
@@ -29,7 +29,6 @@
 import java.util.concurrent.Executor;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IFileDeviceComputer;
 import org.apache.hyracks.api.io.IFileHandle;
@@ -55,12 +54,12 @@
     private int workspaceIndex;
     private IFileDeviceComputer deviceComputer;
 
-    public IOManager(List<IODeviceHandle> devices, Executor executor) throws HyracksException {
+    public IOManager(List<IODeviceHandle> devices, Executor executor) throws HyracksDataException {
         this(devices);
         this.executor = executor;
     }
 
-    public IOManager(List<IODeviceHandle> devices) throws HyracksException {
+    public IOManager(List<IODeviceHandle> devices) throws HyracksDataException {
         this.ioDevices = Collections.unmodifiableList(devices);
         workspaces = new ArrayList<>();
         for (IODeviceHandle d : ioDevices) {
@@ -70,7 +69,7 @@
             }
         }
         if (workspaces.isEmpty()) {
-            throw new HyracksException("No devices with work areas found");
+            throw new HyracksDataException("No devices with workspace found");
         }
         workspaceIndex = 0;
         deviceComputer = new DefaultDeviceComputer(this);
@@ -165,7 +164,7 @@
      * @param offset
      * @param data
      * @return The number of bytes read, possibly zero, or -1 if the given offset is greater than or equal to the file's
-     * current size
+     *         current size
      * @throws HyracksDataException
      */
     @Override
@@ -213,6 +212,7 @@
         }
     }
 
+    @Override
     public synchronized FileReference createWorkspaceFile(String prefix) throws HyracksDataException {
         IODeviceHandle dev = workspaces.get(workspaceIndex);
         workspaceIndex = (workspaceIndex + 1) % workspaces.size();
@@ -328,16 +328,18 @@
     }
 
     @Override
-    public FileReference getFileRef(int ioDeviceId, String relativePath) {
+    public synchronized FileReference getFileReference(int ioDeviceId, String relativePath) {
         IODeviceHandle devHandle = ioDevices.get(ioDeviceId);
         return new FileReference(devHandle, relativePath);
     }
 
     @Override
-    public FileReference getFileRef(String path, boolean relative) throws HyracksDataException {
-        if (relative) {
-            return new FileReference(deviceComputer.compute(path), path);
-        }
+    public FileReference resolve(String path) throws HyracksDataException {
+        return new FileReference(deviceComputer.compute(path), path);
+    }
+
+    @Override
+    public FileReference resolveAbsolutePath(String path) throws HyracksDataException {
         IODeviceHandle devHandle = getDevice(path);
         if (devHandle == null) {
             throw new HyracksDataException("The file with absolute path: " + path + " is outside all io devices");
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/WorkspaceFileFactory.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/WorkspaceFileFactory.java
index 4f9eb29..0dedbbf 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/WorkspaceFileFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/WorkspaceFileFactory.java
@@ -21,15 +21,16 @@
 import org.apache.commons.io.FileUtils;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
+import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.api.io.IWorkspaceFileFactory;
 import org.apache.hyracks.api.resources.IDeallocatable;
 import org.apache.hyracks.api.resources.IDeallocatableRegistry;
 
 public final class WorkspaceFileFactory implements IWorkspaceFileFactory {
     private final IDeallocatableRegistry registry;
-    private final IOManager ioManager;
+    private final IIOManager ioManager;
 
-    public WorkspaceFileFactory(IDeallocatableRegistry registry, IOManager ioManager) {
+    public WorkspaceFileFactory(IDeallocatableRegistry registry, IIOManager ioManager) {
         this.registry = registry;
         this.ioManager = ioManager;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartition.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartition.java
index 20b5bbe..bcfe517 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartition.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartition.java
@@ -37,10 +37,10 @@
 
     private final Executor executor;
 
-    private final IOManager ioManager;
+    private final IIOManager ioManager;
 
     public MaterializedPartition(IHyracksTaskContext ctx, FileReference partitionFile, Executor executor,
-            IOManager ioManager) {
+            IIOManager ioManager) {
         this.ctx = ctx;
         this.partitionFile = partitionFile;
         this.executor = executor;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
index 74aa350..37aed0a 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
@@ -32,7 +32,6 @@
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.api.partitions.PartitionId;
 import org.apache.hyracks.control.common.job.PartitionState;
-import org.apache.hyracks.control.nc.io.IOManager;
 
 public class MaterializedPartitionWriter implements IFrameWriter {
     private static final Logger LOGGER = Logger.getLogger(MaterializedPartitionWriter.class.getName());
@@ -98,7 +97,7 @@
         }
         if (!failed) {
             manager.registerPartition(pid, taId,
-                    new MaterializedPartition(ctx, fRef, executor, (IOManager) ctx.getIOManager()),
+                    new MaterializedPartition(ctx, fRef, executor, ctx.getIOManager()),
                     PartitionState.COMMITTED, taId.getAttempt() == 0 ? false : true);
 
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
index 4713aa6..137ef37 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
@@ -33,7 +33,6 @@
 import org.apache.hyracks.api.partitions.IPartition;
 import org.apache.hyracks.api.partitions.PartitionId;
 import org.apache.hyracks.control.common.job.PartitionState;
-import org.apache.hyracks.control.nc.io.IOManager;
 
 public class MaterializingPipelinedPartition implements IFrameWriter, IPartition {
     private static final Logger LOGGER = Logger.getLogger(MaterializingPipelinedPartition.class.getName());
@@ -42,7 +41,7 @@
 
     private final Executor executor;
 
-    private final IOManager ioManager;
+    private final IIOManager ioManager;
 
     private final PartitionManager manager;
 
@@ -68,7 +67,7 @@
             TaskAttemptId taId, Executor executor) {
         this.ctx = ctx;
         this.executor = executor;
-        this.ioManager = (IOManager) ctx.getIOManager();
+        this.ioManager = ctx.getIOManager();
         this.manager = manager;
         this.pid = pid;
         this.taId = taId;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
index 39a30e1..e44feb7 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
@@ -84,7 +84,6 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-control-nc</artifactId>
       <version>${project.version}</version>
-      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
index 5028ccc..2207441 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
@@ -24,6 +24,7 @@
 import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
 import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.base.IOpenableDataWriterOperator;
@@ -31,10 +32,12 @@
 
 public abstract class AbstractFileWriteOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
     protected class FileWriteOperator implements IOpenableDataWriterOperator {
+        private final IIOManager ioManager;
         private int index;
         private IRecordWriter writer;
 
-        FileWriteOperator(int index) {
+        FileWriteOperator(IIOManager ioManager, int index) {
+            this.ioManager = ioManager;
             this.index = index;
         }
 
@@ -46,11 +49,7 @@
         @Override
         public void open() throws HyracksDataException {
             FileSplit split = splits[index];
-            try {
-                writer = createRecordWriter(split, index);
-            } catch (Exception e) {
-                throw new HyracksDataException(e);
-            }
+            writer = createRecordWriter(ioManager, split, index);
         }
 
         @Override
@@ -64,12 +63,7 @@
 
         @Override
         public void writeData(Object[] data) throws HyracksDataException {
-            try {
-                writer.write(data);
-
-            } catch (Exception e) {
-                throw new HyracksDataException(e);
-            }
+            writer.write(data);
         }
 
         @Override
@@ -95,12 +89,13 @@
         this.splits = splits;
     }
 
-    protected abstract IRecordWriter createRecordWriter(FileSplit fileSplit, int index) throws Exception;
+    protected abstract IRecordWriter createRecordWriter(IIOManager ioManager, FileSplit fileSplit, int index)
+            throws HyracksDataException;
 
     @Override
     public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
             IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new DeserializedOperatorNodePushable(ctx, new FileWriteOperator(partition),
-                recordDescProvider.getInputRecordDescriptor(getActivityId(), 0));
+        return new DeserializedOperatorNodePushable(ctx, new FileWriteOperator(ctx.getIOManager(),
+                partition), recordDescProvider.getInputRecordDescriptor(getActivityId(), 0));
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
index 94c5a68..8acb182 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
@@ -73,7 +73,7 @@
             @Override
             public void initialize() throws HyracksDataException {
                 // will only work for files inside the io devices
-                File f = ioManager.getFileRef(split.getPath(), split.isManaged()).getFile();
+                File f = split.getFile(ioManager);
                 if (quietly) {
                     FileUtils.deleteQuietly(f);
                 } else {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/IFileSplitProvider.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/IFileSplitProvider.java
index a85e76e..016ba46 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/IFileSplitProvider.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/IFileSplitProvider.java
@@ -22,6 +22,7 @@
 
 import org.apache.hyracks.api.io.FileSplit;
 
+@FunctionalInterface
 public interface IFileSplitProvider extends Serializable {
-    public FileSplit[] getFileSplits();
+    FileSplit[] getFileSplits();
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/IRecordWriter.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/IRecordWriter.java
index 845b012..4020d15 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/IRecordWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/IRecordWriter.java
@@ -18,12 +18,12 @@
  */
 package org.apache.hyracks.dataflow.std.file;
 
-import java.io.File;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public interface IRecordWriter {
 
     public void close();
 
-    public void write(Object[] record) throws Exception;
+    public void write(Object[] record) throws HyracksDataException;
 
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/LineFileWriteOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/LineFileWriteOperatorDescriptor.java
index 1186283..5886eb4 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/LineFileWriteOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/LineFileWriteOperatorDescriptor.java
@@ -19,25 +19,30 @@
 package org.apache.hyracks.dataflow.std.file;
 
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.OutputStream;
 
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
 
 public class LineFileWriteOperatorDescriptor extends AbstractFileWriteOperatorDescriptor {
     private static final long serialVersionUID = 1L;
 
     private static class LineWriterImpl extends RecordWriter {
-        LineWriterImpl(File file, int[] columns, char separator) throws Exception {
+        LineWriterImpl(File file, int[] columns, char separator) throws HyracksDataException {
             super(columns, separator, new Object[] { file });
         }
 
-        private static final long serialVersionUID = 1L;
-
         @Override
-        public OutputStream createOutputStream(Object[] args) throws Exception {
-            return new FileOutputStream((File) args[0]);
+        public OutputStream createOutputStream(Object[] args) throws HyracksDataException {
+            try {
+                return new FileOutputStream((File) args[0]);
+            } catch (FileNotFoundException e) {
+                throw new HyracksDataException(e);
+            }
         }
     }
 
@@ -52,14 +57,16 @@
         this(spec, splits, columns, RecordWriter.COMMA);
     }
 
-    public LineFileWriteOperatorDescriptor(IOperatorDescriptorRegistry spec, FileSplit[] splits, int[] columns, char separator) {
+    public LineFileWriteOperatorDescriptor(IOperatorDescriptorRegistry spec, FileSplit[] splits, int[] columns,
+            char separator) {
         super(spec, splits);
         this.columns = columns;
         this.separator = separator;
     }
 
     @Override
-    protected IRecordWriter createRecordWriter(FileSplit fileSplit, int index) throws Exception {
-        return new LineWriterImpl(fileSplit.getFile(null), columns, separator);
+    protected IRecordWriter createRecordWriter(IIOManager ioManager, FileSplit fileSplit, int index)
+            throws HyracksDataException {
+        return new LineWriterImpl(fileSplit.getFile(ioManager), columns, separator);
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/RecordFileScanOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/RecordFileScanOperatorDescriptor.java
index 97f12b5..86d0e3f 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/RecordFileScanOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/RecordFileScanOperatorDescriptor.java
@@ -31,7 +31,8 @@
 public class RecordFileScanOperatorDescriptor extends AbstractDeserializedFileScanOperatorDescriptor {
     private static final long serialVersionUID = 1L;
 
-    public RecordFileScanOperatorDescriptor(IOperatorDescriptorRegistry spec, FileSplit[] splits, RecordDescriptor recordDescriptor) {
+    public RecordFileScanOperatorDescriptor(IOperatorDescriptorRegistry spec, FileSplit[] splits,
+            RecordDescriptor recordDescriptor) {
         super(spec, splits, recordDescriptor);
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/RecordWriter.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/RecordWriter.java
index 2f6b53e..b3c688a 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/RecordWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/RecordWriter.java
@@ -23,6 +23,7 @@
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.std.util.StringSerializationUtils;
 
 public abstract class RecordWriter implements IRecordWriter {
@@ -44,7 +45,7 @@
         this.separator = COMMA;
     }
 
-    public RecordWriter(int[] columns, char separator, Object[] args) throws Exception {
+    public RecordWriter(int[] columns, char separator, Object[] args) throws HyracksDataException {
         OutputStream outputStream = createOutputStream(args);
         if (outputStream != null) {
             bufferedWriter = new BufferedWriter(new OutputStreamWriter(outputStream));
@@ -65,25 +66,29 @@
     }
 
     @Override
-    public void write(Object[] record) throws Exception {
-        if (columns == null) {
-            for (int i = 0; i < record.length; ++i) {
-                if (i != 0) {
-                    bufferedWriter.write(separator);
+    public void write(Object[] record) throws HyracksDataException {
+        try {
+            if (columns == null) {
+                for (int i = 0; i < record.length; ++i) {
+                    if (i != 0) {
+                        bufferedWriter.write(separator);
+                    }
+                    bufferedWriter.write(StringSerializationUtils.toString(record[i]));
                 }
-                bufferedWriter.write(StringSerializationUtils.toString(record[i]));
-            }
-        } else {
-            for (int i = 0; i < columns.length; ++i) {
-                if (i != 0) {
-                    bufferedWriter.write(separator);
+            } else {
+                for (int i = 0; i < columns.length; ++i) {
+                    if (i != 0) {
+                        bufferedWriter.write(separator);
+                    }
+                    bufferedWriter.write(StringSerializationUtils.toString(record[columns[i]]));
                 }
-                bufferedWriter.write(StringSerializationUtils.toString(record[columns[i]]));
             }
+            bufferedWriter.write("\n");
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
         }
-        bufferedWriter.write("\n");
     }
 
-    public abstract OutputStream createOutputStream(Object[] args) throws Exception;
+    public abstract OutputStream createOutputStream(Object[] args) throws HyracksDataException;
 
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
index c6e81a4..6082e7d 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
@@ -78,9 +78,6 @@
 
         @Option(name = "-frame-size", usage = "Hyracks frame size (default: 32768)", required = false)
         public int frameSize = 32768;
-
-        @Option(name = "-relative", usage = "Whether the tree file names are relative", required = false)
-        public boolean relative = true;
     }
 
     public static void main(String[] args) throws Exception {
@@ -150,8 +147,7 @@
         int[] primaryFieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input
                                                         // tuple to field 0 of
                                                         // B-Tree tuple, etc.
-        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName,
-                options.relative);
+        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
 
         IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory(true);
 
@@ -178,7 +174,7 @@
         // tuple
         int[] secondaryFieldPermutation = { 1, 2 };
         IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
-                options.secondaryBTreeName, true);
+                options.secondaryBTreeName);
         // create operator descriptor
         TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
                 spec, recDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/JobHelper.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/JobHelper.java
index 05143a6..4b219e8 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/JobHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/JobHelper.java
@@ -22,17 +22,17 @@
 import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
 import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider;
 import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 
 public class JobHelper {
-    public static IFileSplitProvider createFileSplitProvider(String[] splitNCs, String btreeFileName,
-            boolean relative) {
+    public static IFileSplitProvider createFileSplitProvider(String[] splitNCs, String btreeFileName) {
         FileSplit[] fileSplits = new FileSplit[splitNCs.length];
         for (int i = 0; i < splitNCs.length; ++i) {
             String fileName = btreeFileName + "." + splitNCs[i];
-            fileSplits[i] = new FileSplit(splitNCs[i], fileName, relative);
+            fileSplits[i] = new ManagedFileSplit(splitNCs[i], fileName);
         }
         return new ConstantFileSplitProvider(fileSplits);
     }
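With the relative flag gone, callers of JobHelper always get managed splits. A short usage sketch (the node names are illustrative):

    // Builds one managed split per node controller, each mapping
    // "mybtree.<nc>" to a path inside that node's IO devices.
    String[] splitNCs = new String[] { "nc1", "nc2" };
    IFileSplitProvider btreeSplits = JobHelper.createFileSplitProvider(splitNCs, "mybtree");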
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
index c455c0d..13d54db 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
@@ -76,9 +76,6 @@
 
         @Option(name = "-frame-size", usage = "Hyracks frame size (default: 32768)", required = false)
         public int frameSize = 32768;
-
-        @Option(name = "-relative", usage = "Whether the tree file names are relative", required = false)
-        public boolean relative = true;
     }
 
     public static void main(String[] args) throws Exception {
@@ -154,8 +151,7 @@
         int[] fieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input tuple
                                                  // to field 0 of B-Tree tuple,
                                                  // etc.
-        IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName,
-                options.relative);
+        IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
         IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory(true);
         TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, recDesc,
                 storageManager, lcManagerProvider, btreeSplitProvider, typeTraits, comparatorFactories, null,
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
index 1867a80..3ab3b5f 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
@@ -69,9 +69,6 @@
 
         @Option(name = "-frame-size", usage = "Hyracks frame size (default: 32768)", required = false)
         public int frameSize = 32768;
-
-        @Option(name = "-relative", usage = "Whether the tree file names are relative", required = false)
-        public boolean relative = true;
     }
 
     public static void main(String[] args) throws Exception {
@@ -144,8 +141,7 @@
         int[] highKeyFields = { 1 }; // low key is in field 1 of tuples going
                                      // into search op
 
-        IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName,
-                options.relative);
+        IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
         IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory(true);
         BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, storageManager,
                 lcManagerProvider, btreeSplitProvider, typeTraits, comparatorFactories, null, lowKeyFields,
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
index 2acc204..9fb59ac 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
@@ -72,9 +72,6 @@
 
         @Option(name = "-frame-size", usage = "Hyracks frame size (default: 32768)", required = false)
         public int frameSize = 32768;
-
-        @Option(name = "-relative", usage = "Whether the tree file names are relative", required = false)
-        public boolean relative = true;
     }
 
     public static void main(String[] args) throws Exception {
@@ -125,8 +122,7 @@
         comparatorFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
 
         // use a disk-order scan to read primary index
-        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName,
-                options.relative);
+        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
         IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory(true);
         TreeIndexDiskOrderScanOperatorDescriptor btreeScanOp = new TreeIndexDiskOrderScanOperatorDescriptor(spec,
                 recDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
@@ -149,8 +145,7 @@
         // the B-Tree expects its keyfields to be at the front of its input
         // tuple
         int[] fieldPermutation = { 1, 0 };
-        IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName,
-                options.relative);
+        IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
         TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, null,
                 storageManager, lcManagerProvider, btreeSplitProvider, secondaryTypeTraits, comparatorFactories, null,
                 fieldPermutation, 0.7f, false, 1000L, true, dataflowHelperFactory);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
index 5e140ce..85ebef6 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
@@ -72,9 +72,6 @@
 
         @Option(name = "-frame-size", usage = "Hyracks frame size (default: 32768)", required = false)
         public int frameSize = 32768;
-
-        @Option(name = "-relative", usage = "Whether the tree file names are relative", required = false)
-        public boolean relative = true;
     }
 
     public static void main(String[] args) throws Exception {
@@ -170,7 +167,7 @@
                                               // index search op
 
         IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
-                options.secondaryBTreeName, options.relative);
+                options.secondaryBTreeName);
         IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory(true);
         BTreeSearchOperatorDescriptor secondarySearchOp = new BTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
                 storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
@@ -188,8 +185,7 @@
                                             // going into primary index search
                                             // op
 
-        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName,
-                options.relative);
+        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
         BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
                 storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
                 null, primaryLowKeyFields, primaryHighKeyFields, true, true, dataflowHelperFactory, false, false, null,
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/words.txt b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/nc1/words.txt
similarity index 100%
copy from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/words.txt
copy to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/nc1/words.txt
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part1.txt b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/orders-with-locations-part1.txt
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part1.txt
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/orders-with-locations-part1.txt
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part2.txt b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/orders-with-locations-part2.txt
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part2.txt
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/orders-with-locations-part2.txt
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer-part1.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/customer-part1.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer-part1.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/customer-part1.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/customer.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/customer.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer3.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/customer3.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer3.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/customer3.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer4.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/customer4.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer4.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/customer4.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/lineitem.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/lineitem.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/lineitem.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/lineitem.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/nation.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/nation.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/nation.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/nation.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part1.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/orders-part1.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part1.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/orders-part1.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/part.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/part.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/part.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/part.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/partsupp.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/partsupp.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/partsupp.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/partsupp.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/region.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/region.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/region.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/region.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/supplier.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/supplier.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/supplier.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/supplier.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/tpch.ddl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/tpch.ddl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/tpch.ddl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.001/tpch.ddl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part2.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.002/orders-part2.tbl
similarity index 100%
copy from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part2.tbl
copy to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device0/data/tpch0.002/orders-part2.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/cleanednumbereddblptitles.txt b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/cleanednumbereddblptitles.txt
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/cleanednumbereddblptitles.txt
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/cleanednumbereddblptitles.txt
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/dblp.txt b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/dblp.txt
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/dblp.txt
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/dblp.txt
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/spatial.txt b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/spatial.txt
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/spatial.txt
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/spatial.txt
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer-part2.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/tpch0.001/customer-part2.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/customer-part2.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/tpch0.001/customer-part2.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part2.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/tpch0.001/orders-part2.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders-part2.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/tpch0.001/orders-part2.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/tpch0.001/orders.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/tpch0.001/orders.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders1.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/tpch0.001/orders1.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders1.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/tpch0.001/orders1.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders4.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/tpch0.001/orders4.tbl
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/orders4.tbl
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/tpch0.001/orders4.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/lineitem.tbl b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/tpch0.002/lineitem.tbl
similarity index 100%
copy from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/tpch0.001/lineitem.tbl
copy to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/tpch0.002/lineitem.tbl
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/wordcount.tsv b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/wordcount.tsv
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/wordcount.tsv
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/wordcount.tsv
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/words.txt b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/words.txt
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/words.txt
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/data/device1/data/words.txt
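The renames above move the integration-test inputs under per-device directories (device0/data, device1/data), so the tests can address them with managed splits instead of absolute paths. Assuming the test IO manager mounts those device directories, a split such as the following resolves against the device mount rather than the working directory:

    // Mirrors the construction used in AbstractBTreeOperatorTest below;
    // the relative path is resolved by the node's IO manager.
    FileSplit ordersSplit = new ManagedFileSplit("nc1",
            "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl");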
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
index c2b5388..0c0cd60 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
@@ -27,8 +27,9 @@
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
@@ -76,7 +77,8 @@
     protected final int primaryFieldCount = 6;
     protected final ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
     protected final int primaryKeyFieldCount = 1;
-    protected final IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+    protected final IBinaryComparatorFactory[] primaryComparatorFactories =
+            new IBinaryComparatorFactory[primaryKeyFieldCount];
     protected final int[] primaryBloomFilterKeyFields = new int[primaryKeyFieldCount];
 
     protected final RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
@@ -86,26 +88,25 @@
 
     // to be set by subclasses
     protected String primaryFileName;
-    protected boolean primaryRelative;
     protected IFileSplitProvider primarySplitProvider;
 
     // field, type and key declarations for secondary indexes
     protected final int secondaryFieldCount = 2;
     protected final ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
     protected final int secondaryKeyFieldCount = 2;
-    protected final IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
+    protected final IBinaryComparatorFactory[] secondaryComparatorFactories =
+            new IBinaryComparatorFactory[secondaryKeyFieldCount];
     protected final int[] secondaryBloomFilterKeyFields = new int[secondaryKeyFieldCount];
 
     protected final RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
             new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
     protected String secondaryFileName;
-    protected boolean secondaryRelative;
     protected IFileSplitProvider secondarySplitProvider;
 
     protected ITreeIndexOperatorTestHelper testHelper;
 
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksDataException {
         return new BTreeOperatorTestHelper();
     }
 
@@ -114,13 +115,11 @@
         testHelper = createTestHelper();
         dataflowHelperFactory = createDataFlowHelperFactory();
         primaryFileName = testHelper.getPrimaryIndexName();
-        primaryRelative = testHelper.getPrimaryIndexNameRelative();
-        primarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID, primaryFileName,
-                primaryRelative) });
+        primarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new ManagedFileSplit(NC1_ID,
+                primaryFileName) });
         secondaryFileName = testHelper.getSecondaryIndexName();
-        secondaryRelative = testHelper.getSecondaryIndexNameRelative();
-        secondarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                secondaryFileName, secondaryRelative) });
+        secondarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new ManagedFileSplit(NC1_ID,
+                secondaryFileName) });
 
         // field, type and key declarations for primary index
         primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
@@ -145,7 +144,8 @@
 
     public void createPrimaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
-        TransientLocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
+        TransientLocalResourceFactoryProvider localResourceFactoryProvider =
+                new TransientLocalResourceFactoryProvider();
         TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
                 lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
                 primaryBloomFilterKeyFields, dataflowHelperFactory, localResourceFactoryProvider,
@@ -158,8 +158,8 @@
     protected void loadPrimaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/tpch0.001/orders-part1.tbl").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID,
+                "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl") };
         IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -201,7 +201,8 @@
 
     public void createSecondaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
-        TransientLocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
+        TransientLocalResourceFactoryProvider localResourceFactoryProvider =
+                new TransientLocalResourceFactoryProvider();
         TreeIndexCreateOperatorDescriptor secondaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
                 storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
                 secondaryComparatorFactories, secondaryBloomFilterKeyFields, dataflowHelperFactory,
@@ -270,8 +271,8 @@
         IndexOperation pipelineOperation = useUpsert ? IndexOperation.UPSERT : IndexOperation.INSERT;
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/tpch0.001/orders-part2.tbl").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID,
+                "data" + File.separator + "tpch0.002" + File.separator + "orders-part2.tbl") };
         IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -290,7 +291,8 @@
 
         // insert into primary index
         int[] primaryFieldPermutation = { 0, 1, 2, 4, 5, 7 };
-        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryBtreeInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryBtreeInsertOp =
+                new TreeIndexInsertUpdateDeleteOperatorDescriptor(
                 spec, ordersDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
                 primaryComparatorFactories, primaryBloomFilterKeyFields, primaryFieldPermutation, pipelineOperation,
                 dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
@@ -298,7 +300,8 @@
 
         // first secondary index
         int[] fieldPermutationB = { 4, 0 };
-        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsertOp =
+                new TreeIndexInsertUpdateDeleteOperatorDescriptor(
                 spec, ordersDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
                 secondaryComparatorFactories, secondaryBloomFilterKeyFields, fieldPermutationB, pipelineOperation,
                 dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java
index 0bb02f8..53c7a66 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java
@@ -79,8 +79,7 @@
                 false, null, NoOpOperationCallbackFactory.INSTANCE, null, null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath(), false) });
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
         IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexSearchOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexSearchOperatorTest.java
index baca105..2303323 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexSearchOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexSearchOperatorTest.java
@@ -84,8 +84,7 @@
                 false, null, NoOpOperationCallbackFactory.INSTANCE, null, null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath(), false) });
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
         IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexStatsOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexStatsOperatorTest.java
index 911783c..4c4851b 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexStatsOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexStatsOperatorTest.java
@@ -51,8 +51,7 @@
                 lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
                 primaryBloomFilterKeyFields, dataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryStatsOp, NC1_ID);
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath(), false) });
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
         IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeSecondaryIndexInsertOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeSecondaryIndexInsertOperatorTest.java
index 27c15ff..f828220 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeSecondaryIndexInsertOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeSecondaryIndexInsertOperatorTest.java
@@ -102,8 +102,7 @@
                 dataflowHelperFactory, false, false, null, NoOpOperationCallbackFactory.INSTANCE, null, null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath(), false) });
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
         IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeSecondaryIndexSearchOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeSecondaryIndexSearchOperatorTest.java
index 5996ca0..ddbf68f 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeSecondaryIndexSearchOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeSecondaryIndexSearchOperatorTest.java
@@ -99,18 +99,13 @@
                 storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
                 primaryBloomFilterKeyFields, primaryLowKeyFields, primaryHighKeyFields, true, true,
                 dataflowHelperFactory, false, false, null, NoOpOperationCallbackFactory.INSTANCE, null, null);
-
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath(), false) });
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
         IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBtreeSearchOp, 0, primaryBtreeSearchOp, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
-
         spec.addRoot(printer);
         runTest(spec);
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeSecondaryIndexUpsertOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeSecondaryIndexUpsertOperatorTest.java
index 79fcae3..68985f3 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeSecondaryIndexUpsertOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeSecondaryIndexUpsertOperatorTest.java
@@ -101,8 +101,7 @@
                 dataflowHelperFactory, false, false, null, NoOpOperationCallbackFactory.INSTANCE, null, null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath(), false) });
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
         IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/common/ITreeIndexOperatorTestHelper.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/common/ITreeIndexOperatorTestHelper.java
index 9f76706..be9d981 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/common/ITreeIndexOperatorTestHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/common/ITreeIndexOperatorTestHelper.java
@@ -21,12 +21,5 @@
 
 public interface ITreeIndexOperatorTestHelper {
     public String getPrimaryIndexName();
-
-    public boolean getPrimaryIndexNameRelative();
-
     public String getSecondaryIndexName();
-
-    public boolean getSecondaryIndexNameRelative();
-
-    public void cleanup(String primaryFileName, String secondaryFileName);
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/common/LSMTreeOperatorTestHelper.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/common/LSMTreeOperatorTestHelper.java
index 6e82f4f..b263068 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/common/LSMTreeOperatorTestHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/common/LSMTreeOperatorTestHelper.java
@@ -19,11 +19,6 @@
 
 package org.apache.hyracks.tests.am.common;
 
-import java.io.File;
-import java.io.FilenameFilter;
-import java.util.Date;
-
-import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.control.nc.io.IOManager;
 import org.apache.hyracks.storage.am.lsm.common.api.IVirtualBufferCacheProvider;
 import org.apache.hyracks.test.support.TestVirtualBufferCacheProvider;
@@ -40,54 +35,4 @@
         this.virtualBufferCacheProvider = new TestVirtualBufferCacheProvider(DEFAULT_MEM_PAGE_SIZE,
                 DEFAULT_MEM_NUM_PAGES);
     }
-
-    @Override
-    public String getPrimaryIndexName() {
-        return "primary" + simpleDateFormat.format(new Date());
-    }
-
-    @Override
-    public boolean getPrimaryIndexNameRelative() {
-        return true;
-    }
-
-    @Override
-    public String getSecondaryIndexName() {
-        return "secondary" + simpleDateFormat.format(new Date());
-    }
-
-    @Override
-    public boolean getSecondaryIndexNameRelative() {
-        return true;
-    }
-
-    @Override
-    public void cleanup(String primaryFileName, String secondaryFileName) {
-        for (IODeviceHandle dev : ioManager.getIODevices()) {
-            File primaryDir = new File(dev.getMount(), primaryFileName);
-            cleanupDir(primaryDir);
-            File secondaryDir = new File(dev.getMount(), secondaryFileName);
-            cleanupDir(secondaryDir);
-        }
-    }
-
-    private void cleanupDir(File dir) {
-        if (!dir.exists()) {
-            return;
-        }
-        FilenameFilter filter = new FilenameFilter() {
-            @Override
-            public boolean accept(File dir, String name) {
-                return !name.startsWith(".");
-            }
-        };
-        String[] files = dir.list(filter);
-        if (files != null) {
-            for (String fileName : files) {
-                File file = new File(dir.getPath() + File.separator + fileName);
-                file.delete();
-            }
-        }
-        dir.delete();
-    }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/common/TreeOperatorTestHelper.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/common/TreeOperatorTestHelper.java
index dd03568..e721812 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/common/TreeOperatorTestHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/common/TreeOperatorTestHelper.java
@@ -19,7 +19,6 @@
 
 package org.apache.hyracks.tests.am.common;
 
-import java.io.File;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 
@@ -30,33 +29,11 @@
 
     @Override
     public String getPrimaryIndexName() {
-        return System.getProperty("java.io.tmpdir") + "primary" + simpleDateFormat.format(new Date());
+        return "primary" + simpleDateFormat.format(new Date());
     }
 
     @Override
     public String getSecondaryIndexName() {
-        return System.getProperty("java.io.tmpdir") + "secondary" + simpleDateFormat.format(new Date());
-    }
-
-    @Override
-    public void cleanup(String primaryFileName, String secondaryFileName) {
-        File primary = new File(primaryFileName);
-        if (primary.exists()) {
-            primary.deleteOnExit();
-        }
-        File secondary = new File(secondaryFileName);
-        if (secondary.exists()) {
-            secondary.deleteOnExit();
-        }
-    }
-
-    @Override
-    public boolean getPrimaryIndexNameRelative() {
-        return false;
-    }
-
-    @Override
-    public boolean getSecondaryIndexNameRelative() {
-        return false;
+        return "secondary" + simpleDateFormat.format(new Date());
     }
 }
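Since the helper now returns bare managed names, the java.io.tmpdir prefix and the explicit cleanup pass are no longer needed; the setUp code in AbstractBTreeOperatorTest (earlier in this change) simply wraps the name in a ManagedFileSplit. A condensed sketch of that flow, assuming the node name NC1_ID:

    // getPrimaryIndexName() now yields something like "primary<timestamp>";
    // the managed split places it under the node's own storage.
    String primaryFileName = testHelper.getPrimaryIndexName();
    IFileSplitProvider primarySplitProvider = new ConstantFileSplitProvider(
            new FileSplit[] { new ManagedFileSplit(NC1_ID, primaryFileName) });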
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexScanOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexScanOperatorTest.java
index 71e26ae..839187c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexScanOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexScanOperatorTest.java
@@ -19,7 +19,7 @@
 
 package org.apache.hyracks.tests.am.lsm.btree;
 
-import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
 import org.apache.hyracks.tests.am.btree.BTreePrimaryIndexScanOperatorTest;
@@ -27,7 +27,8 @@
 
 public class LSMBTreePrimaryIndexScanOperatorTest extends BTreePrimaryIndexScanOperatorTest {
 
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksDataException {
         return new LSMBTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexSearchOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexSearchOperatorTest.java
index ebbf35b..e29465d 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexSearchOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexSearchOperatorTest.java
@@ -19,14 +19,15 @@
 
 package org.apache.hyracks.tests.am.lsm.btree;
 
-import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
 import org.apache.hyracks.tests.am.btree.BTreePrimaryIndexSearchOperatorTest;
 import org.apache.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
 
 public class LSMBTreePrimaryIndexSearchOperatorTest extends BTreePrimaryIndexSearchOperatorTest {
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksDataException {
         return new LSMBTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexInsertOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexInsertOperatorTest.java
index 93937d4..f0b4436 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexInsertOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexInsertOperatorTest.java
@@ -19,14 +19,15 @@
 
 package org.apache.hyracks.tests.am.lsm.btree;
 
-import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
 import org.apache.hyracks.tests.am.btree.BTreeSecondaryIndexInsertOperatorTest;
 import org.apache.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
 
 public class LSMBTreeSecondaryIndexInsertOperatorTest extends BTreeSecondaryIndexInsertOperatorTest {
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksDataException {
         return new LSMBTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexSearchOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexSearchOperatorTest.java
index 90a09a2..22a5700 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexSearchOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexSearchOperatorTest.java
@@ -19,14 +19,15 @@
 
 package org.apache.hyracks.tests.am.lsm.btree;
 
-import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
 import org.apache.hyracks.tests.am.btree.BTreeSecondaryIndexSearchOperatorTest;
 import org.apache.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
 
 public class LSMBTreeSecondaryIndexSearchOperatorTest extends BTreeSecondaryIndexSearchOperatorTest {
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksDataException {
         return new LSMBTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexInsertOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexInsertOperatorTest.java
index b2a867e..b3b178d 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexInsertOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexInsertOperatorTest.java
@@ -21,7 +21,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -35,7 +35,8 @@
         this.rTreeType = RTreeType.LSMRTREE;
     }
 
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksDataException {
         return new LSMRTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexSearchOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexSearchOperatorTest.java
index 9bc781b..18b8c77 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexSearchOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexSearchOperatorTest.java
@@ -21,7 +21,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -34,7 +34,8 @@
         this.rTreeType = RTreeType.LSMRTREE;
     }
 
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksDataException {
         return new LSMRTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexInsertOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexInsertOperatorTest.java
index b02ebc2..dea5e6f 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexInsertOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexInsertOperatorTest.java
@@ -21,7 +21,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -34,7 +34,8 @@
         this.rTreeType = RTreeType.LSMRTREE_WITH_ANTIMATTER;
     }
 
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksDataException {
         return new LSMRTreeWithAntiMatterTuplesOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest.java
index 701e7e0..b725820 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest.java
@@ -21,7 +21,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
-import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -34,7 +34,8 @@
             this.rTreeType = RTreeType.LSMRTREE_WITH_ANTIMATTER;
         }
 
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksDataException {
         return new LSMRTreeWithAntiMatterTuplesOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
index b6e84e4..a07c4ae 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
@@ -28,8 +28,9 @@
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.DoublePointable;
@@ -106,7 +107,6 @@
 
     // to be set by subclasses
     protected String primaryFileName;
-    protected boolean primaryFileNameRelative;
     protected IFileSplitProvider primarySplitProvider;
 
     // field, type and key declarations for secondary indexes
@@ -126,12 +126,11 @@
     protected IBinaryComparatorFactory[] btreeComparatorFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
 
     protected String secondaryFileName;
-    protected boolean secondaryFileNameRelative;
     protected IFileSplitProvider secondarySplitProvider;
 
     protected ITreeIndexOperatorTestHelper testHelper;
 
-    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksDataException {
         return new RTreeOperatorTestHelper();
     }
 
@@ -140,13 +139,11 @@
         testHelper = createTestHelper();
 
         primaryFileName = testHelper.getPrimaryIndexName();
-        primaryFileNameRelative = testHelper.getPrimaryIndexNameRelative();
-        primarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID, primaryFileName,
-                primaryFileNameRelative) });
+        primarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new ManagedFileSplit(NC1_ID,
+                primaryFileName) });
         secondaryFileName = testHelper.getSecondaryIndexName();
-        secondaryFileNameRelative = testHelper.getSecondaryIndexNameRelative();
-        secondarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                secondaryFileName, secondaryFileNameRelative) });
+        secondarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new ManagedFileSplit(NC1_ID,
+                secondaryFileName) });
 
         // field, type and key declarations for primary index
         primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
@@ -218,8 +215,8 @@
     protected void loadPrimaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/orders-with-locations-part1.txt").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID,
+                "data" + File.separator + "orders-with-locations-part1.txt") };
         IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -329,8 +326,8 @@
 
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/orders-with-locations-part2.txt").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID,
+                "data" + File.separator + "orders-with-locations-part2.txt") };
         IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
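
Note on the split construction pattern above: every test-data split moves from the old FileSplit constructor, which took a path plus a boolean flag (see the removed primaryFileNameRelative and secondaryFileNameRelative fields), to ManagedFileSplit with a node name and a path relative to that node's io device. A minimal sketch, using only the constructor and provider classes already present in this patch:

    // Sketch, not part of the patch: a managed split for test data under a node's io device.
    import java.io.File;
    import org.apache.hyracks.api.io.FileSplit;
    import org.apache.hyracks.api.io.ManagedFileSplit;
    import org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider;
    import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;

    final class ManagedSplitSketch {
        static IFileSplitProvider ordersOnNode(String nodeId) {
            // Relative path; the owning node resolves it against its configured io device.
            String relativePath = "data" + File.separator + "orders-with-locations-part1.txt";
            return new ConstantFileSplitProvider(new FileSplit[] { new ManagedFileSplit(nodeId, relativePath) });
        }
    }
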
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
index 439ff23..13a00f5 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
@@ -66,11 +66,9 @@
     @Test
     public void searchUpdatedSecondaryIndexTest() throws Exception {
         JobSpecification spec = new JobSpecification();
-
         // build tuple
         ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
         DataOutput dos = tb.getDataOutput();
-
         tb.reset();
         DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
         tb.addFieldEndOffset();
@@ -80,47 +78,35 @@
         tb.addFieldEndOffset();
         DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
         tb.addFieldEndOffset();
-
         ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
                 DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
                 DoubleSerializerDeserializer.INSTANCE };
         RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
         ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
                 keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
         int[] keyFields = { 0, 1, 2, 3 };
-
         RTreeSearchOperatorDescriptor secondarySearchOp = new RTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
                 storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
                 secondaryComparatorFactories, keyFields, rtreeDataflowHelperFactory, false, false, null,
                 NoOpOperationCallbackFactory.INSTANCE, null, null);
-
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondarySearchOp, NC1_ID);
-
         // fifth field from the tuples coming from secondary index
         int[] primaryLowKeyFields = { 4 };
         // fifth field from the tuples coming from secondary index
         int[] primaryHighKeyFields = { 4 };
-
         // search primary index
         BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
                 storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
                 null, primaryLowKeyFields, primaryHighKeyFields, true, true, btreeDataflowHelperFactory, false, false,
                 null, NoOpOperationCallbackFactory.INSTANCE, null, null);
-
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primarySearchOp, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath(), false) });
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
         IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, primarySearchOp, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, printer, 0);
-
         spec.addRoot(printer);
         runTest(spec);
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexScanOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexScanOperatorTest.java
index d190cfe..2890b0b 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexScanOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexScanOperatorTest.java
@@ -63,11 +63,9 @@
     @Test
     public void scanPrimaryIndexTest() throws Exception {
         JobSpecification spec = new JobSpecification();
-
         // build dummy tuple
         ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
         DataOutput dos = tb.getDataOutput();
-
         tb.reset();
         DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
         tb.addFieldEndOffset();
@@ -77,32 +75,24 @@
         tb.addFieldEndOffset();
         DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
         tb.addFieldEndOffset();
-
         ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
                 DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
                 DoubleSerializerDeserializer.INSTANCE };
         RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
         ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
                 keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
         int[] keyFields = null;
-
         RTreeSearchOperatorDescriptor secondarySearchOp = new RTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
                 storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
                 secondaryComparatorFactories, keyFields, rtreeDataflowHelperFactory, false, false, null,
                 NoOpOperationCallbackFactory.INSTANCE, null, null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondarySearchOp, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath(), false) });
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
         IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, printer, 0);
-
         spec.addRoot(printer);
         runTest(spec);
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
index 8426c95..9e40588 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
@@ -64,11 +64,9 @@
     @Test
     public void searchSecondaryIndexTest() throws Exception {
         JobSpecification spec = new JobSpecification();
-
         // build tuple
         ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
         DataOutput dos = tb.getDataOutput();
-
         tb.reset();
         DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
         tb.addFieldEndOffset();
@@ -78,45 +76,35 @@
         tb.addFieldEndOffset();
         DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
         tb.addFieldEndOffset();
-
         ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
                 DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
                 DoubleSerializerDeserializer.INSTANCE };
         RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
         ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
                 keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
         int[] keyFields = { 0, 1, 2, 3 };
-
         RTreeSearchOperatorDescriptor secondarySearchOp = new RTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
                 storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
                 secondaryComparatorFactories, keyFields, rtreeDataflowHelperFactory, false, false, null,
                 NoOpOperationCallbackFactory.INSTANCE, null, null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondarySearchOp, NC1_ID);
-
         // fifth field from the tuples coming from secondary index
         int[] primaryLowKeyFields = { 4 };
         // fifth field from the tuples coming from secondary index
         int[] primaryHighKeyFields = { 4 };
-
         // search primary index
         BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
                 storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
                 null, primaryLowKeyFields, primaryHighKeyFields, true, true, btreeDataflowHelperFactory, false, false,
                 null, NoOpOperationCallbackFactory.INSTANCE, null, null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primarySearchOp, NC1_ID);
-
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath(), false) });
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
         IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, primarySearchOp, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, printer, 0);
-
         spec.addRoot(printer);
         runTest(spec);
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexStatsOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexStatsOperatorTest.java
index 200fde0..9d4b16d 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexStatsOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexStatsOperatorTest.java
@@ -55,16 +55,13 @@
     @Test
     public void showPrimaryIndexStats() throws Exception {
         JobSpecification spec = new JobSpecification();
-
         TreeIndexStatsOperatorDescriptor secondaryStatsOp = new TreeIndexStatsOperatorDescriptor(spec, storageManager,
                 lcManagerProvider, secondarySplitProvider, secondaryTypeTraits, secondaryComparatorFactories, null,
                 rtreeDataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryStatsOp, NC1_ID);
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath(), false) });
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { createFile(nc1) });
         IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
         spec.connect(new OneToOneConnectorDescriptor(spec), secondaryStatsOp, 0, printer, 0);
         spec.addRoot(printer);
         runTest(spec);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
index 041ce3d..8696f8b 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
@@ -19,6 +19,7 @@
 package org.apache.hyracks.tests.comm;
 
 import java.io.BufferedReader;
+import java.io.File;
 import java.io.FileReader;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -47,7 +48,8 @@
 
 public class SerializationDeserializationTest {
     private static final Logger LOGGER = Logger.getLogger(SerializationDeserializationTest.class.getName());
-    private static final String DBLP_FILE = "data/dblp.txt";
+    private static final String DBLP_FILE = "data" + File.separator + "device1" + File.separator + "data"
+            + File.separator + "dblp.txt";
 
     private static class SerDeserRunner {
         private final IHyracksTaskContext ctx;
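
The "data" + File.separator + ... concatenation used for DBLP_FILE above recurs in most of the split literals below. A tiny hypothetical helper, shown only to illustrate the pattern (the joinPath name is not part of this patch):

    // Hypothetical convenience for building platform-independent test paths.
    import java.io.File;

    final class PathSketch {
        static String joinPath(String... parts) {
            // e.g. joinPath("data", "device1", "data", "dblp.txt")
            return String.join(File.separator, parts);
        }
    }
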
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
index 7a339b7..fe9c1ea 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
@@ -27,15 +27,11 @@
 import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.rules.TemporaryFolder;
-
+import org.apache.commons.io.FileUtils;
 import org.apache.hyracks.api.client.HyracksConnection;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.api.comm.IFrameTupleAccessor;
@@ -43,6 +39,9 @@
 import org.apache.hyracks.api.dataset.IHyracksDataset;
 import org.apache.hyracks.api.dataset.IHyracksDatasetReader;
 import org.apache.hyracks.api.dataset.ResultSetId;
+import org.apache.hyracks.api.io.FileReference;
+import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobFlag;
 import org.apache.hyracks.api.job.JobId;
 import org.apache.hyracks.api.job.JobSpecification;
@@ -54,6 +53,9 @@
 import org.apache.hyracks.control.nc.resources.memory.FrameManager;
 import org.apache.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
 import org.apache.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
 
 public abstract class AbstractIntegrationTest {
     private static final Logger LOGGER = Logger.getLogger(AbstractIntegrationTest.class.getName());
@@ -62,21 +64,19 @@
     public static final String NC2_ID = "nc2";
 
     private static ClusterControllerService cc;
-    private static NodeControllerService nc1;
-    private static NodeControllerService nc2;
+    protected static NodeControllerService nc1;
+    protected static NodeControllerService nc2;
     private static IHyracksClientConnection hcc;
 
     private final List<File> outputFiles;
+    private static AtomicInteger aInteger = new AtomicInteger(0);
 
     protected static int DEFAULT_MEM_PAGE_SIZE = 32768;
     protected static int DEFAULT_MEM_NUM_PAGES = 1000;
     protected static double DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE = 0.01;
 
-    @Rule
-    public TemporaryFolder outputFolder = new TemporaryFolder();
-
     public AbstractIntegrationTest() {
-        outputFiles = new ArrayList<File>();
+        outputFiles = new ArrayList<>();
     }
 
     @BeforeClass
@@ -87,6 +87,8 @@
         ccConfig.clusterNetIpAddress = "127.0.0.1";
         ccConfig.clusterNetPort = 39001;
         ccConfig.profileDumpPeriod = 10000;
+        FileUtils.deleteQuietly(new File("target" + File.separator + "data"));
+        FileUtils.copyDirectory(new File("data"), new File("target" + File.separator + "data"));
         File outDir = new File("target" + File.separator + "ClusterController");
         outDir.mkdirs();
         File ccRoot = File.createTempFile(AbstractIntegrationTest.class.getName(), ".data", outDir);
@@ -103,6 +105,8 @@
         ncConfig1.dataIPAddress = "127.0.0.1";
         ncConfig1.resultIPAddress = "127.0.0.1";
         ncConfig1.nodeId = NC1_ID;
+        ncConfig1.ioDevices = System.getProperty("user.dir") + File.separator + "target" + File.separator + "data"
+                + File.separator + "device0";
         nc1 = new NodeControllerService(ncConfig1);
         nc1.start();
 
@@ -113,6 +117,8 @@
         ncConfig2.dataIPAddress = "127.0.0.1";
         ncConfig2.resultIPAddress = "127.0.0.1";
         ncConfig2.nodeId = NC2_ID;
+        ncConfig2.ioDevices = System.getProperty("user.dir") + File.separator + "target" + File.separator + "data"
+                + File.separator + "device1";
         nc2 = new NodeControllerService(ncConfig2);
         nc2.start();
 
@@ -153,7 +159,7 @@
         IHyracksDataset hyracksDataset = new HyracksDataset(hcc, spec.getFrameSize(), nReaders);
         IHyracksDatasetReader reader = hyracksDataset.createReader(jobId, resultSetId);
 
-        List<String> resultRecords = new ArrayList<String>();
+        List<String> resultRecords = new ArrayList<>();
         ByteBufferInputStream bbis = new ByteBufferInputStream();
 
         FrameManager resultDisplayFrameMgr = new FrameManager(spec.getFrameSize());
@@ -225,12 +231,12 @@
         hcc.waitForCompletion(jobId);
     }
 
-    protected File createTempFile() throws IOException {
-        File tempFile = File.createTempFile(getClass().getName(), ".tmp", outputFolder.getRoot());
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Output file: " + tempFile.getAbsolutePath());
-        }
-        outputFiles.add(tempFile);
-        return tempFile;
+    protected FileSplit createFile(NodeControllerService ncs) throws IOException {
+        String fileName = "f" + aInteger.getAndIncrement() + ".tmp";
+        FileReference fileRef = ncs.getIoManager().getFileReference(0, fileName);
+        FileUtils.deleteQuietly(fileRef.getFile());
+        fileRef.getFile().createNewFile();
+        outputFiles.add(fileRef.getFile());
+        return new ManagedFileSplit(ncs.getId(), fileName);
     }
 }
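
A usage sketch for the createFile helper above: the returned ManagedFileSplit carries only a node id and a relative path, and the physical file can be recovered through the same io-manager call the helper uses. This is a fragment meant to sit inside AbstractIntegrationTest; the resolveOutputFile name is invented, and device index 0 is assumed exactly as in createFile:

    // Sketch only: map a managed output split back to its on-disk file for inspection.
    protected File resolveOutputFile() throws IOException {
        FileSplit out = createFile(nc1);                                  // relative path owned by NC1
        FileReference ref = nc1.getIoManager().getFileReference(0, out.getPath());
        return ref.getFile();                                             // lives under NC1's io device
    }
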
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
index dc0b6f7..890ab0a 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
@@ -70,7 +70,7 @@
     public TemporaryFolder outputFolder = new TemporaryFolder();
 
     public AbstractMultiNCIntegrationTest() {
-        outputFiles = new ArrayList<File>();
+        outputFiles = new ArrayList<>();
     }
 
     @BeforeClass
@@ -92,6 +92,9 @@
 
         asterixNCs = new NodeControllerService[ASTERIX_IDS.length];
         for (int i = 0; i < ASTERIX_IDS.length; i++) {
+            File ioDev = new File("target" + File.separator + ASTERIX_IDS[i] + File.separator + "ioDevice");
+            FileUtils.forceMkdir(ioDev);
+            FileUtils.copyDirectory(new File("data" + File.separator + "device0"), ioDev);
             NCConfig ncConfig = new NCConfig();
             ncConfig.ccHost = "localhost";
             ncConfig.ccPort = 39001;
@@ -99,6 +102,7 @@
             ncConfig.dataIPAddress = "127.0.0.1";
             ncConfig.resultIPAddress = "127.0.0.1";
             ncConfig.nodeId = ASTERIX_IDS[i];
+            ncConfig.ioDevices = ioDev.getAbsolutePath();
             asterixNCs[i] = new NodeControllerService(ncConfig);
             asterixNCs[i].start();
         }
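
The io-device staging above (create a per-NC directory under target, copy the seed data into it, then point ncConfig.ioDevices at it) mirrors what the single-NC base class now does with device0 and device1. A compact hypothetical helper, assuming the commons-io calls these tests already use (the prepareIoDevice name is invented for illustration):

    // Hypothetical: stage one node's io device under target/ and return the path for NCConfig.ioDevices.
    static String prepareIoDevice(String nodeId, File seedDataDir) throws IOException {
        File ioDev = new File("target" + File.separator + nodeId + File.separator + "ioDevice");
        FileUtils.deleteQuietly(ioDev);          // start from a clean directory
        FileUtils.forceMkdir(ioDev);
        FileUtils.copyDirectory(seedDataDir, ioDev);
        return ioDev.getAbsolutePath();          // e.g. ncConfig.ioDevices = prepareIoDevice(...)
    }
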
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
index a1bc086..f20461c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
@@ -30,6 +30,7 @@
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
@@ -73,8 +74,8 @@
 public class AggregationTest extends AbstractIntegrationTest {
 
     final IFileSplitProvider splitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC2_ID, new File("data/tpch0.001/lineitem.tbl").getAbsolutePath(),
-                    false) });
+            new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.002" + File.separator
+                    + "lineitem.tbl") });
 
     final RecordDescriptor desc = new RecordDescriptor(new ISerializerDeserializer[] {
             new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
index adcb89b..b5c6238 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
@@ -29,6 +29,7 @@
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
@@ -61,8 +62,7 @@
     public void countOfCountsSingleNC() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] splits = new FileSplit[] { new FileSplit(NC1_ID, new File("data/words.txt").getAbsolutePath(),
-                false) };
+        FileSplit[] splits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt") };
         IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
         RecordDescriptor desc = new RecordDescriptor(
                 new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
@@ -72,12 +72,12 @@
                 splitProvider,
                 new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
                 desc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
         InMemorySortOperatorDescriptor sorter = new InMemorySortOperatorDescriptor(spec, new int[] { 0 },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 desc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC2_ID);
 
         RecordDescriptor desc2 = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE });
@@ -85,11 +85,11 @@
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 new MultiFieldsAggregatorFactory(
                         new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc2);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group, NC2_ID);
 
         InMemorySortOperatorDescriptor sorter2 = new InMemorySortOperatorDescriptor(spec, new int[] { 1 },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, desc2);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter2, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter2, NC2_ID);
 
         RecordDescriptor desc3 = new RecordDescriptor(new ISerializerDeserializer[] {
                 IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
@@ -97,13 +97,13 @@
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
                 new MultiFieldsAggregatorFactory(
                         new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc3);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group2, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group2, NC2_ID);
 
         ResultSetId rsId = new ResultSetId(1);
         IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true, false,
                 ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         spec.addResultSetId(rsId);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID);
 
         IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
                 new FieldHashPartitionComputerFactory(new int[] { 0 },
@@ -134,8 +134,7 @@
     public void countOfCountsMultiNC() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] splits = new FileSplit[] { new FileSplit(NC1_ID, new File("data/words.txt").getAbsolutePath(),
-                false) };
+        FileSplit[] splits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt") };
         IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
         RecordDescriptor desc = new RecordDescriptor(
                 new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
@@ -145,7 +144,7 @@
                 splitProvider,
                 new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
                 desc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
         InMemorySortOperatorDescriptor sorter = new InMemorySortOperatorDescriptor(spec, new int[] { 0 },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
@@ -208,8 +207,7 @@
     public void countOfCountsExternalSortMultiNC() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] splits = new FileSplit[] { new FileSplit(NC1_ID, new File("data/words.txt").getAbsolutePath(),
-                false) };
+        FileSplit[] splits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt") };
         IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
         RecordDescriptor desc = new RecordDescriptor(
                 new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
@@ -219,7 +217,7 @@
                 splitProvider,
                 new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
                 desc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
         ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 3, new int[] { 0 },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
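
The NC1_ID to NC2_ID constraint changes in this test all track the relocated words.txt split: a file scanner has to be pinned to the node that owns its ManagedFileSplit. A small sketch that derives the constraint from the split instead of repeating the node id, reusing spec and desc from the surrounding test method and only accessors that appear elsewhere in this patch (FileSplit.getNodeName()):

    // Sketch (method fragment): pin the scanner to whichever node the split names.
    FileSplit[] splits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt") };
    FileScanOperatorDescriptor scanner = new FileScanOperatorDescriptor(spec,
            new ConstantFileSplitProvider(splits),
            new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
            desc);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanner, splits[0].getNodeName());
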
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
index 1edf3e9..efbb9d2 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
@@ -28,6 +28,7 @@
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
@@ -57,8 +58,10 @@
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/orders-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part2.tbl") };
         IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -86,9 +89,9 @@
         ResultSetId rsId = new ResultSetId(1);
         spec.addResultSetId(rsId);
 
-        File file = File.createTempFile(getClass().getName(), ".tmp");
+        FileSplit fs = createFile(nc1);
         IFileSplitProvider outputSplitProvider = new ConstantFileSplitProvider(
-                new FileSplit[] { new FileSplit(NC1_ID, file.getAbsolutePath(), false) });
+                new FileSplit[] { fs });
         IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outputSplitProvider, "|");
 
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
@@ -106,7 +109,7 @@
                         new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
 
         runTest(spec);
-        System.out.println("Result write into :" + file.getAbsolutePath());
+        System.out.println("Result write into :" + fs.getPath() + " in node: " + fs.getNodeName());
     }
 
     @Test
@@ -114,8 +117,10 @@
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/orders-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part2.tbl") };
         IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/JobFailureTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/JobFailureTest.java
index 30e2e03..b76e458 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/JobFailureTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/JobFailureTest.java
@@ -32,7 +32,7 @@
 
     @Test
     public void failureOnCreatePushRuntime() throws Exception {
-        for (int round = 0; round < 100; ++round) {
+        for (int round = 0; round < 1000; ++round) {
             execTest();
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
index de65776..e92113a 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
@@ -31,6 +31,7 @@
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
@@ -69,11 +70,14 @@
 public class LocalityAwareConnectorTest extends AbstractMultiNCIntegrationTest {
 
     final IFileSplitProvider splitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit("asterix-001", new File("data/tpch0.001/lineitem.tbl").getAbsolutePath(),
-                    false),
-                    new FileSplit("asterix-002", new File("data/tpch0.001/lineitem.tbl").getAbsolutePath(), false),
-                    new FileSplit("asterix-003", new File("data/tpch0.001/lineitem.tbl").getAbsolutePath(), false),
-                    new FileSplit("asterix-004", new File("data/tpch0.001/lineitem.tbl").getAbsolutePath(), false) });
+            new FileSplit[] { new ManagedFileSplit(ASTERIX_IDS[0], "data" + File.separator + "tpch0.001"
+                    + File.separator + "lineitem.tbl"),
+                    new ManagedFileSplit(ASTERIX_IDS[1], "data" + File.separator + "tpch0.001" + File.separator
+                            + "lineitem.tbl"),
+                    new ManagedFileSplit(ASTERIX_IDS[2], "data" + File.separator + "tpch0.001" + File.separator
+                            + "lineitem.tbl"),
+                    new ManagedFileSplit(ASTERIX_IDS[3], "data" + File.separator + "tpch0.001" + File.separator
+                            + "lineitem.tbl") });
 
     final int fileSize = 800 * 1024 * 4;
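
The four lineitem.tbl splits above differ only in the node id taken from ASTERIX_IDS; since the base class now stages the same seed data on every NC, the array can also be built in a loop. A sketch under that assumption:

    // Sketch: one managed split per Asterix NC, all pointing at the same relative path.
    String lineitem = "data" + File.separator + "tpch0.001" + File.separator + "lineitem.tbl";
    FileSplit[] splits = new FileSplit[ASTERIX_IDS.length];
    for (int i = 0; i < ASTERIX_IDS.length; i++) {
        splits[i] = new ManagedFileSplit(ASTERIX_IDS[i], lineitem);
    }
    IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
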
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java
index 3bb35ad..b62c011 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java
@@ -29,6 +29,7 @@
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import org.apache.hyracks.dataflow.common.data.parsers.IValueParserFactory;
@@ -65,15 +66,14 @@
 
         JobSpecification spec = new JobSpecification();
 
-        String inputFileName = "data/words.txt";
+        String inputFileName = "data" + File.separator + "nc1" + File.separator + "words.txt";
         File[] outputFile = new File[outputArity];
         for (int i = 0; i < outputArity; i++) {
             outputFile[i] = File.createTempFile("replicateop", null);
             outputFile[i].deleteOnExit();
         }
 
-        FileSplit[] inputSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(inputFileName).getAbsolutePath(),
-                false) };
+        FileSplit[] inputSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, inputFileName) };
 
         String[] locations = new String[] { NC1_ID };
 
@@ -109,7 +109,7 @@
         }
         String[] expectedResultsFileNames = new String[outputArity];
         for (int i = 0; i < outputArity; i++) {
-            expectedResultsFileNames[i] = inputFileName;
+            expectedResultsFileNames[i] = "data" + File.separator + "device0" + File.separator + inputFileName;
         }
         runTestAndCompareResults(spec, expectedResultsFileNames);
     }
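
On the expected-results path above: the managed input split named by inputFileName resolves under NC1's io device, so the comparison path is now prefixed with the device0 directory. A one-line hypothetical helper that keeps the two strings in step (the expectedPath name is invented, not part of the patch):

    // Hypothetical: prefix a managed test path with the device directory it resolves under.
    static String expectedPath(String deviceDir, String managedPath) {
        // e.g. expectedPath("data" + File.separator + "device0", inputFileName)
        return deviceDir + File.separator + managedPath;
    }
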
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
index e3eb98f..dc91dd2 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
@@ -28,6 +28,7 @@
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
@@ -54,8 +55,8 @@
         JobSpecification spec = new JobSpecification();
 
         IFileSplitProvider splitProvider = new ConstantFileSplitProvider(new FileSplit[] {
-                new FileSplit(NC2_ID, new File("data/words.txt").getAbsolutePath(), false),
-                new FileSplit(NC1_ID, new File("data/words.txt").getAbsolutePath(), false) });
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt"),
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "nc1" + File.separator + "words.txt") });
 
         RecordDescriptor desc = new RecordDescriptor(
                 new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
@@ -63,7 +64,8 @@
         FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
                 spec,
                 splitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE },
+                        ','),
                 desc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID, NC1_ID);
 
@@ -85,8 +87,8 @@
     public void scanPrint02() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new File(
-                "data/tpch0.001/orders.tbl").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "orders.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -101,14 +103,14 @@
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
 
         ResultSetId rsId = new ResultSetId(1);
         IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true, false,
                 ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         spec.addResultSetId(rsId);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID);
 
         IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
                 new FieldHashPartitionComputerFactory(new int[] { 0 },
@@ -124,8 +126,8 @@
     public void scanPrint03() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new File(
-                "data/tpch0.001/orders.tbl").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "orders.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
@@ -140,14 +142,14 @@
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
 
         ResultSetId rsId = new ResultSetId(1);
         IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true, false,
                 ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         spec.addResultSetId(rsId);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID);
 
         IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
                 new FieldHashPartitionComputerFactory(new int[] { 0 },
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
index 9f05aac..3043cba 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
@@ -28,6 +28,7 @@
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
@@ -55,8 +56,10 @@
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/orders-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part2.tbl") };
         IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -103,8 +106,10 @@
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/orders-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part2.tbl") };
         IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java
index 7b6f5ea..d6f39ad 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java
@@ -29,6 +29,7 @@
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.accessors.UTF8StringBinaryHashFunctionFamily;
@@ -36,6 +37,7 @@
 import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import org.apache.hyracks.dataflow.common.data.parsers.IValueParserFactory;
 import org.apache.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import org.apache.hyracks.dataflow.std.connectors.MToNBroadcastConnectorDescriptor;
 import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
 import org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider;
 import org.apache.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
@@ -83,10 +85,10 @@
         Arrays.fill(orderValueParserFactories, UTF8StringParserFactory.INSTANCE);
     }
 
-    private IOperatorDescriptor getPrinter(JobSpecification spec, File file) {
+    private IOperatorDescriptor getPrinter(JobSpecification spec, String path) {
         IFileSplitProvider outputSplitProvider = new ConstantFileSplitProvider(
                 new FileSplit[] {
-                        new FileSplit(NC1_ID, file.getAbsolutePath(), false) });
+                        new ManagedFileSplit(NC1_ID, path) });
 
         return DEBUG ? new PlainFileWriterOperatorDescriptor(spec, outputSplitProvider, "|")
                 : new NullSinkOperatorDescriptor(spec);
@@ -95,17 +97,17 @@
     @Test
     public void customerOrderCIDHybridHashJoin_Case1() throws Exception {
         JobSpecification spec = new JobSpecification();
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/tpch0.001/customer4.tbl").getAbsolutePath(), false) };
+        FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "customer4.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new File(
-                "data/tpch0.001/orders4.tbl").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "orders4.tbl") };
 
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
                 new DelimitedDataTupleParserFactory(orderValueParserFactories, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
 
         FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
                 new DelimitedDataTupleParserFactory(custValueParserFactories, '|'), custDesc);
@@ -122,14 +124,14 @@
 
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
 
-        File file = File.createTempFile(getClass().getName(), "case1");
-        IOperatorDescriptor printer = getPrinter(spec, file);
+        String path = getClass().getName() + File.separator + "case1";
+        IOperatorDescriptor printer = getPrinter(spec, path);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
         spec.connect(custJoinConn, custScanner, 0, join, 0);
 
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        IConnectorDescriptor ordJoinConn = new MToNBroadcastConnectorDescriptor(spec);
         spec.connect(ordJoinConn, ordScanner, 0, join, 1);
 
         IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
@@ -137,25 +139,25 @@
 
         spec.addRoot(printer);
         runTest(spec);
-        System.out.println("output to " + file.getAbsolutePath());
+        System.out.println("output to " + path);
     }
 
     @Test
     public void customerOrderCIDHybridHashJoin_Case2() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/tpch0.001/customer3.tbl").getAbsolutePath(), false) };
+        FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "customer3.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new File(
-                "data/tpch0.001/orders4.tbl").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "orders4.tbl") };
 
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
 
         FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
                 new DelimitedDataTupleParserFactory(orderValueParserFactories, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
 
         FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
                 new DelimitedDataTupleParserFactory(custValueParserFactories, '|'), custDesc);
@@ -172,14 +174,14 @@
 
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
 
-        File file = File.createTempFile(getClass().getName(), "case2");
-        IOperatorDescriptor printer = getPrinter(spec, file);
+        String path = getClass().getName() + File.separator + "case2";
+        IOperatorDescriptor printer = getPrinter(spec, path);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
         spec.connect(custJoinConn, custScanner, 0, join, 0);
 
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        IConnectorDescriptor ordJoinConn = new MToNBroadcastConnectorDescriptor(spec);
         spec.connect(ordJoinConn, ordScanner, 0, join, 1);
 
         IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
@@ -187,7 +189,7 @@
 
         spec.addRoot(printer);
         runTest(spec);
-        System.out.println("output to " + file.getAbsolutePath());
+        System.out.println("output to " + path);
     }
 
     @Test
@@ -195,18 +197,18 @@
 
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/tpch0.001/customer3.tbl").getAbsolutePath(), false) };
+        FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "customer3.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new File(
-                "data/tpch0.001/orders1.tbl").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "orders1.tbl") };
 
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
 
         FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
                 new DelimitedDataTupleParserFactory(orderValueParserFactories, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
 
         FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
                 new DelimitedDataTupleParserFactory(custValueParserFactories, '|'), custDesc);
@@ -223,14 +225,14 @@
 
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
 
-        File file = File.createTempFile(getClass().getName(), "case3");
-        IOperatorDescriptor printer = getPrinter(spec, file);
+        String path = getClass().getName() + File.separator + "case3";
+        IOperatorDescriptor printer = getPrinter(spec, path);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
         spec.connect(custJoinConn, custScanner, 0, join, 0);
 
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        IConnectorDescriptor ordJoinConn = new MToNBroadcastConnectorDescriptor(spec);
         spec.connect(ordJoinConn, ordScanner, 0, join, 1);
 
         IConnectorDescriptor joinPrinterConn = new OneToOneConnectorDescriptor(spec);
@@ -238,7 +240,7 @@
 
         spec.addRoot(printer);
         runTest(spec);
-        System.out.println("output to " + file.getAbsolutePath());
+        System.out.println("output to " + path);
     }
 
 }
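
Note (editor's sketch, not part of the patch): the test hunks above all follow one pattern — splits that used to wrap new File("data/...").getAbsolutePath() with a false "managed" flag become ManagedFileSplits over node-relative paths, the scanner's location constraint moves to the node that actually holds the split, and the scanner-to-join connector becomes an M-to-N broadcast once the two sides no longer share a node. A minimal sketch of the split construction, using placeholder node names "NC1"/"NC2" and only the constructors and accessors visible in these hunks:

    import java.io.File;

    import org.apache.hyracks.api.io.FileSplit;
    import org.apache.hyracks.api.io.ManagedFileSplit;

    // Sketch only: node names and data paths mirror the tpch0.001 fixtures above.
    public class ManagedSplitSketch {
        public static void main(String[] args) {
            // Old style (removed above): new FileSplit(nc, new File("data/...").getAbsolutePath(), false)
            // New style: a node-relative path that the node's IO manager maps onto one of its IO devices.
            FileSplit[] ordersSplits = new FileSplit[] {
                    new ManagedFileSplit("NC1",
                            "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
                    new ManagedFileSplit("NC2",
                            "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
            for (FileSplit split : ordersSplits) {
                System.out.println(split.getNodeName() + " -> " + split.getPath());
            }
        }
    }
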
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
index 5270a70..f8c7487 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
@@ -30,6 +30,7 @@
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
@@ -71,8 +72,8 @@
     public void customerOrderCIDJoin() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/tpch0.001/customer.tbl").getAbsolutePath(), false) };
+        FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "customer.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
         RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -80,8 +81,8 @@
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new File(
-                "data/tpch0.001/orders.tbl").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "orders.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(
                 new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
@@ -108,7 +109,7 @@
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
                 ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
 
         FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
                 new DelimitedDataTupleParserFactory(
@@ -134,7 +135,7 @@
                 ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        IConnectorDescriptor ordJoinConn = new MToNBroadcastConnectorDescriptor(spec);
         spec.connect(ordJoinConn, ordScanner, 0, join, 0);
 
         IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -151,8 +152,8 @@
     public void customerOrderCIDHybridHashJoin() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/tpch0.001/customer.tbl").getAbsolutePath(), false) };
+        FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "customer.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
         RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -160,8 +161,8 @@
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new File(
-                "data/tpch0.001/orders.tbl").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "orders.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -187,7 +188,7 @@
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
 
         FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
                 new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
@@ -217,7 +218,7 @@
                 ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        IConnectorDescriptor ordJoinConn = new MToNBroadcastConnectorDescriptor(spec);
         spec.connect(ordJoinConn, ordScanner, 0, join, 0);
 
         IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -234,8 +235,8 @@
     public void customerOrderCIDInMemoryHashLeftOuterJoin() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/tpch0.001/customer.tbl").getAbsolutePath(), false) };
+        FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "customer.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
         RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -243,8 +244,8 @@
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new File(
-                "data/tpch0.001/orders.tbl").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "orders.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(
                 new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
@@ -302,7 +303,7 @@
                 ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        IConnectorDescriptor ordJoinConn = new MToNBroadcastConnectorDescriptor(spec);
         spec.connect(ordJoinConn, ordScanner, 0, join, 1);
 
         IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -319,8 +320,8 @@
     public void customerOrderCIDHybridHashLeftOuterJoin() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/tpch0.001/customer.tbl").getAbsolutePath(), false) };
+        FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "customer.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
         RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -328,8 +329,8 @@
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC2_ID, new File(
-                "data/tpch0.001/orders.tbl").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "orders.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(
                 new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
@@ -387,7 +388,7 @@
                 ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        IConnectorDescriptor ordJoinConn = new MToNBroadcastConnectorDescriptor(spec);
         spec.connect(ordJoinConn, ordScanner, 0, join, 1);
 
         IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -405,8 +406,10 @@
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/customer-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/customer-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part2.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
         RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -415,8 +418,10 @@
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
         FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/orders-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part2.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(
                 new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
@@ -493,8 +498,10 @@
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/customer-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/customer-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part2.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
         RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -503,8 +510,10 @@
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
         FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/orders-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part2.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(
                 new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
@@ -579,8 +588,10 @@
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/customer-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/customer-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part2.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
         RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -589,8 +600,10 @@
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
         FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/orders-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part2.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(
                 new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
@@ -631,7 +644,8 @@
 
         InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 1 },
                 new int[] { 0 },
-                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
+                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                        .of(UTF8StringPointable.FACTORY) },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 custOrderJoinDesc, 128, null);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 2);
@@ -665,8 +679,10 @@
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/customer-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/customer-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part2.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
         RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -675,8 +691,10 @@
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
         FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/orders-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part2.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(
                 new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
index fac1815..03cd5d4 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
@@ -35,6 +35,7 @@
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
@@ -126,8 +127,8 @@
     public void customerOrderCIDJoin() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileSplit[] custSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/tpch0.001/customer.tbl").getAbsolutePath(), false) };
+        FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "customer.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
         RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -135,8 +136,8 @@
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new File(
-                "data/tpch0.001/orders.tbl").getAbsolutePath(), false) };
+        FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
+                + "tpch0.001" + File.separator + "orders.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -162,7 +163,7 @@
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
 
         FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
                 new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
@@ -184,7 +185,7 @@
                 ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
-        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        IConnectorDescriptor ordJoinConn = new MToNBroadcastConnectorDescriptor(spec);
         spec.connect(ordJoinConn, ordScanner, 0, join, 0);
 
         IConnectorDescriptor custJoinConn = new MToNBroadcastConnectorDescriptor(spec);
@@ -202,8 +203,10 @@
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/customer-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/customer-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part2.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
         RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -212,8 +215,10 @@
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
         FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/orders-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part2.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -279,8 +284,10 @@
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/customer-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/customer-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part2.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
         RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -289,8 +296,10 @@
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
         FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/orders-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part2.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -341,7 +350,7 @@
         IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
         spec.connect(ordJoinConn, ordScanner, 0, join, 0);
 
-        IConnectorDescriptor custJoinConn = new MToNBroadcastConnectorDescriptor(spec);
+        IConnectorDescriptor custJoinConn = new OneToOneConnectorDescriptor(spec);
         spec.connect(custJoinConn, custScanner, 0, join, 1);
 
         IConnectorDescriptor joinPrinterConn = new MToNBroadcastConnectorDescriptor(spec);
@@ -356,8 +365,10 @@
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] custSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/customer-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/customer-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "customer-part2.tbl") };
         IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
         RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -366,8 +377,10 @@
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
 
         FileSplit[] ordersSplits = new FileSplit[] {
-                new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl").getAbsolutePath(), false),
-                new FileSplit(NC2_ID, new File("data/tpch0.001/orders-part2.tbl").getAbsolutePath(), false) };
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part1.tbl"),
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
+                        + "orders-part2.tbl") };
         IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
         RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/UnionTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/UnionTest.java
index 7824071..02cab8f 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/UnionTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/UnionTest.java
@@ -26,6 +26,7 @@
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import org.apache.hyracks.dataflow.common.data.parsers.IValueParserFactory;
@@ -46,8 +47,8 @@
         JobSpecification spec = new JobSpecification();
 
         IFileSplitProvider splitProvider = new ConstantFileSplitProvider(new FileSplit[] {
-                new FileSplit(NC2_ID, new File("data/words.txt").getAbsolutePath(), false),
-                new FileSplit(NC1_ID, new File("data/words.txt").getAbsolutePath(), false) });
+                new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt"),
+                new ManagedFileSplit(NC1_ID, "data" + File.separator + "nc1" + File.separator + "words.txt") });
 
         RecordDescriptor desc = new RecordDescriptor(
                 new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
index ecfd212..29e1d6e 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
@@ -20,6 +20,7 @@
 package org.apache.hyracks.tests.integration;
 
 import java.io.File;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
 import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
@@ -28,6 +29,7 @@
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.ManagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
@@ -49,11 +51,10 @@
 
 public class VSizeFrameSortMergeTest extends AbstractIntegrationTest {
 
-    public static String[] INPUTS = { "data/tpch0.001/orders-part1.tbl", "data/tpch0.001/orders-part2.tbl" };
-
+    AtomicInteger aInteger = new AtomicInteger(0);
     FileSplit[] ordersSplits = new FileSplit[] {
-            new FileSplit(NC1_ID, new File(INPUTS[0]).getAbsolutePath(), false),
-            new FileSplit(NC2_ID, new File(INPUTS[1]).getAbsolutePath(), false) };
+            new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+            new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
     IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
     RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
             new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -92,10 +93,10 @@
                         PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
 
-        File file = File.createTempFile(getClass().getName(), ".tmp");
+        String path = getClass().getSimpleName() + aInteger.getAndIncrement() + ".tmp";
 
         IFileSplitProvider outputSplitProvider = new ConstantFileSplitProvider(
-                new FileSplit[] { new FileSplit(NC1_ID, file.getAbsolutePath(), false) });
+                new FileSplit[] { new ManagedFileSplit(NC1_ID, path) });
         IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outputSplitProvider, "|");
 
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
@@ -114,6 +115,6 @@
 
         spec.addRoot(printer);
         runTest(spec);
-        System.out.println("Result write into :" + file.getAbsolutePath());
+        System.out.println("Result write into :" + path);
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/src/main/java/org/apache/hyracks/examples/text/client/WordCountMain.java b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/src/main/java/org/apache/hyracks/examples/text/client/WordCountMain.java
index 5f4ac2d..650c60d 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/src/main/java/org/apache/hyracks/examples/text/client/WordCountMain.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/src/main/java/org/apache/hyracks/examples/text/client/WordCountMain.java
@@ -32,6 +32,7 @@
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.UnmanagedFileSplit;
 import org.apache.hyracks.api.job.JobFlag;
 import org.apache.hyracks.api.job.JobId;
 import org.apache.hyracks.api.job.JobSpecification;
@@ -127,7 +128,7 @@
                 throw new IllegalArgumentException("File split " + s + " not well formed");
             }
             File file = new File(s.substring(idx + 1));
-            fSplits[i] = new FileSplit(s.substring(0, idx), file.getAbsolutePath(), false);
+            fSplits[i] = new UnmanagedFileSplit(s.substring(0, idx), file.getAbsolutePath());
             fileSize += file.length();
         }
         return fSplits;
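
Note (editor's sketch, not part of the patch): the client-side parsers keep user-supplied absolute paths but now wrap them in UnmanagedFileSplit, the split type for files that live outside the node's IO devices. A hedged sketch of that parsing pattern, with an assumed "node:path" input string:

    import java.io.File;

    import org.apache.hyracks.api.io.FileSplit;
    import org.apache.hyracks.api.io.UnmanagedFileSplit;

    // Sketch only: parses "node:path" pairs the same way WordCountMain and Common do above.
    public class UnmanagedSplitSketch {
        public static void main(String[] args) {
            String[] splits = "nc1:/tmp/words-part1.txt,nc2:/tmp/words-part2.txt".split(",");
            FileSplit[] fSplits = new FileSplit[splits.length];
            for (int i = 0; i < splits.length; ++i) {
                String s = splits[i].trim();
                int idx = s.indexOf(':');
                if (idx < 0) {
                    throw new IllegalArgumentException("File split " + s + " not well formed");
                }
                // Unmanaged: the absolute path is used as-is, outside any IO device.
                fSplits[i] = new UnmanagedFileSplit(s.substring(0, idx),
                        new File(s.substring(idx + 1)).getAbsolutePath());
            }
            for (FileSplit split : fSplits) {
                System.out.println(split.getNodeName() + " -> " + split.getPath());
            }
        }
    }
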
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Common.java b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Common.java
index 5b17b93..3c0ecfd 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Common.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Common.java
@@ -26,6 +26,7 @@
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.io.FileSplit;
+import org.apache.hyracks.api.io.UnmanagedFileSplit;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
 import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
@@ -99,22 +100,7 @@
             if (idx < 0) {
                 throw new IllegalArgumentException("File split " + s + " not well formed");
             }
-            fSplits[i] = new FileSplit(s.substring(0, idx), new File(s.substring(idx + 1)).getAbsolutePath(), false);
-        }
-        return fSplits;
-    }
-
-    static FileSplit[] parseFileSplits(String fileSplits, int count) {
-        String[] splits = fileSplits.split(",");
-        FileSplit[] fSplits = new FileSplit[splits.length];
-        for (int i = 0; i < splits.length; ++i) {
-            String s = splits[i].trim();
-            int idx = s.indexOf(':');
-            if (idx < 0) {
-                throw new IllegalArgumentException("File split " + s + " not well formed");
-            }
-            fSplits[i] = new FileSplit(s.substring(0, idx), new File(s.substring(idx + 1) + "_"
-                    + count).getAbsolutePath(), false);
+            fSplits[i] = new UnmanagedFileSplit(s.substring(0, idx), new File(s.substring(idx + 1)).getAbsolutePath());
         }
         return fSplits;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
index d831e89..42fe8c9 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
@@ -187,16 +187,10 @@
         spec.connect(scanGroupConnDef2, fileScanner, 0, grouper, 0);
 
         IFileSplitProvider outSplitProvider = new ConstantFileSplitProvider(outSplits);
-
-        AbstractSingleActivityOperatorDescriptor writer;
-
-        if (outPlain)
-            writer = new PlainFileWriterOperatorDescriptor(spec, outSplitProvider, "|");
-        else
-            writer = new FrameFileWriterOperatorDescriptor(spec, outSplitProvider);
-
+        AbstractSingleActivityOperatorDescriptor writer = outPlain ? new PlainFileWriterOperatorDescriptor(spec,
+                outSplitProvider, "|")
+                : new FrameFileWriterOperatorDescriptor(spec, outSplitProvider);
         createPartitionConstraint(spec, writer, outSplits);
-
         IConnectorDescriptor groupOutConn = new OneToOneConnectorDescriptor(spec);
         spec.connect(groupOutConn, grouper, 0, writer, 0);
 
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
index c791f93..8ab0708 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
@@ -26,9 +26,6 @@
 
 import java.util.EnumSet;
 
-import org.kohsuke.args4j.CmdLineParser;
-import org.kohsuke.args4j.Option;
-
 import org.apache.hyracks.api.client.HyracksConnection;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
@@ -55,6 +52,8 @@
 import org.apache.hyracks.dataflow.std.sort.Algorithm;
 import org.apache.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.sort.TopKSorterOperatorDescriptor;
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
 
 public class Sort {
     private static class Options {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
index 5dbbae4..76e4dec 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
@@ -58,7 +58,7 @@
         this.localResourceRepository = opDesc.getStorageManager().getLocalResourceRepository(ctx);
         this.resourceIdFactory = opDesc.getStorageManager().getResourceIdFactory(ctx);
         FileSplit fileSplit = opDesc.getFileSplitProvider().getFileSplits()[partition];
-        this.resourceRef = ioManager.getFileRef(fileSplit.getPath(), fileSplit.isManaged());
+        this.resourceRef = fileSplit.getFileReference(ioManager);
         this.resourceName = resourceRef.getRelativePath();
         this.durable = durable;
         this.partition = partition;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
index 5121f89..a501efb 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
@@ -72,7 +72,7 @@
             IFileMapProvider fileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
             LocalResource resource = treeIndexHelper.getResource();
             IIOManager ioManager = ctx.getIOManager();
-            FileReference fileRef = ioManager.getFileRef(resource.getPath(), true);
+            FileReference fileRef = ioManager.resolve(resource.getPath());
             int indexFileId = fileMapProvider.lookupFileId(fileRef);
             statsGatherer = new TreeIndexStatsGatherer(bufferCache, treeIndex.getMetaManager(), indexFileId,
                     treeIndex.getRootPageId());
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/IndexFileNameUtil.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/IndexFileNameUtil.java
index 496a2a2..71d7c76 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/IndexFileNameUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/IndexFileNameUtil.java
@@ -39,6 +39,6 @@
     public static FileReference getIndexAbsoluteFileRef(IIndexOperatorDescriptor opDesc, int partition,
             IIOManager ioManager) throws HyracksDataException {
         FileSplit split = opDesc.getFileSplitProvider().getFileSplits()[partition];
-        return ioManager.getFileRef(split.getPath(), split.isManaged());
+        return split.getFileReference(ioManager);
     }
 }
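
Note (editor's sketch, not part of the patch): the storage-layer hunks replace ioManager.getFileRef(path, managed) with either split.getFileReference(ioManager), when a FileSplit is at hand, or ioManager.resolve(...) / ioManager.resolveAbsolutePath(...), when only a path string is available. A minimal sketch of the split-driven form, assuming the getFileReference(IIOManager) signature shown in these hunks:

    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.api.io.FileReference;
    import org.apache.hyracks.api.io.FileSplit;
    import org.apache.hyracks.api.io.IIOManager;

    // Sketch only: each split type resolves itself, so callers no longer pass an
    // isManaged flag. A ManagedFileSplit maps its relative path onto an IO device,
    // while an UnmanagedFileSplit returns a reference to its absolute path directly.
    final class SplitResolutionSketch {
        private SplitResolutionSketch() {
        }

        static FileReference toFileReference(FileSplit split, IIOManager ioManager) throws HyracksDataException {
            return split.getFileReference(ioManager);
        }
    }
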
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
index 3f05399..9d12562 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
@@ -123,7 +123,7 @@
                     new BTree(virtualBufferCache, virtualBufferCache.getFileMapProvider(),
                             new VirtualMetaDataPageManager(virtualBufferCache.getNumPages()), interiorFrameFactory,
                             insertLeafFrameFactory, cmpFactories, fieldCount,
-                            ioManager.getFileRef(fileManager.getBaseDir() + "_virtual_" + i, false)),
+                            ioManager.resolveAbsolutePath(fileManager.getBaseDir() + "_virtual_" + i)),
                     virtualBufferCache, i == 0 ? true : false,
                     filterFactory == null ? null : filterFactory.createLSMComponentFilter());
             memoryComponents.add(mutableComponent);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
index bf6ef24..4a12b5a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
@@ -58,8 +58,8 @@
         String ts = getCurrentTimestamp();
         String baseName = baseDir + ts + SPLIT_STRING + ts;
         // Begin timestamp and end timestamp are identical since it is a flush
-        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING, false), null,
-                hasBloomFilter ? createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING, false) : null);
+        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING), null,
+                hasBloomFilter ? createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING) : null);
     }
 
     @Override
@@ -70,8 +70,8 @@
 
         String baseName = baseDir + firstTimestampRange[0] + SPLIT_STRING + lastTimestampRange[1];
         // Get the range of timestamps by taking the earliest and the latest timestamps
-        return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + BTREE_STRING, false), null,
-                hasBloomFilter ? createMergeFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING, false) : null);
+        return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + BTREE_STRING), null,
+                hasBloomFilter ? createMergeFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING) : null);
     }
 
     private static FilenameFilter btreeFilter = new FilenameFilter() {
@@ -205,8 +205,8 @@
 
         String baseName = baseDir + ts + SPLIT_STRING + ts;
         // Begin timestamp and end timestamp are identical since it is a transaction
-        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING, false), null,
-                createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING, false));
+        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING), null,
+                createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
@@ -245,8 +245,8 @@
                 throw new HyracksDataException("unrecognized file found = " + fileName);
             }
         }
-        FileReference bTreeFileRef = ioManager.getFileRef(bTreeFile.getAbsolutePath(), false);
-        FileReference bloomFilterFileRef = ioManager.getFileRef(bloomFilterFile.getAbsolutePath(), false);
+        FileReference bTreeFileRef = ioManager.resolveAbsolutePath(bTreeFile.getAbsolutePath());
+        FileReference bloomFilterFileRef = ioManager.resolveAbsolutePath(bloomFilterFile.getAbsolutePath());
 
         return new LSMComponentFileReferences(bTreeFileRef, null, bloomFilterFileRef);
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyFileManager.java
index e0bebf4..683c2e9 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyFileManager.java
@@ -73,10 +73,9 @@
         String ts = getCurrentTimestamp();
         String baseName = baseDir + ts + SPLIT_STRING + ts;
         // Begin timestamp and end timestamp are identical since it is a flush
-        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING, false),
-                createFlushFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING, false), createFlushFile(baseName
-                        + SPLIT_STRING
-                        + BLOOM_FILTER_STRING, false));
+        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING),
+                createFlushFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING), createFlushFile(baseName
+                        + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
@@ -88,10 +87,9 @@
         String baseName = baseDir + firstTimestampRange[0] + SPLIT_STRING + lastTimestampRange[1];
         // Get the range of timestamps by taking the earliest and the latest
         // timestamps
-        return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + BTREE_STRING, false),
-                createMergeFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING, false), createMergeFile(baseName
-                        + SPLIT_STRING
-                        + BLOOM_FILTER_STRING, false));
+        return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + BTREE_STRING),
+                createMergeFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING), createMergeFile(baseName
+                        + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
@@ -211,10 +209,9 @@
         Files.createFile(Paths.get(baseDir + TRANSACTION_PREFIX + ts));
 
         String baseName = baseDir + ts + SPLIT_STRING + ts;
-        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING, false),
-                createFlushFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING, false), createFlushFile(baseName
-                        + SPLIT_STRING
-                        + BLOOM_FILTER_STRING, false));
+        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING),
+                createFlushFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING), createFlushFile(baseName
+                        + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
@@ -256,9 +253,9 @@
                 throw new HyracksDataException("unrecognized file found = " + fileName);
             }
         }
-        FileReference bTreeFileRef = ioManager.getFileRef(bTreeFile.getAbsolutePath(), false);
-        FileReference buddyBTreeFileRef = ioManager.getFileRef(buddyBTreeFile.getAbsolutePath(), false);
-        FileReference bloomFilterFileRef = ioManager.getFileRef(bloomFilterFile.getAbsolutePath(), false);
+        FileReference bTreeFileRef = ioManager.resolveAbsolutePath(bTreeFile.getAbsolutePath());
+        FileReference buddyBTreeFileRef = ioManager.resolveAbsolutePath(buddyBTreeFile.getAbsolutePath());
+        FileReference bloomFilterFileRef = ioManager.resolveAbsolutePath(bloomFilterFile.getAbsolutePath());
         return new LSMComponentFileReferences(bTreeFileRef, buddyBTreeFileRef, bloomFilterFileRef);
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
index b9f4cad..64742ef 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
@@ -124,7 +124,7 @@
         File dir = new File(baseDir);
         String[] files = dir.list(filter);
         for (String fileName : files) {
-            FileReference fileRef = ioManager.getFileRef(dir.getPath() + File.separator + fileName, false);
+            FileReference fileRef = ioManager.resolveAbsolutePath(dir.getPath() + File.separator + fileName);
             if (treeFactory == null) {
                 allFiles.add(new ComparableFileName(fileRef));
                 continue;
@@ -184,19 +184,19 @@
         }
     };
 
-    protected FileReference createFlushFile(String relFlushFileName, boolean relative) throws HyracksDataException {
-        return ioManager.getFileRef(relFlushFileName, relative);
+    protected FileReference createFlushFile(String flushFileName) throws HyracksDataException {
+        return ioManager.resolveAbsolutePath(flushFileName);
     }
 
-    protected FileReference createMergeFile(String relMergeFileName, boolean relative) throws HyracksDataException {
-        return createFlushFile(relMergeFileName, relative);
+    protected FileReference createMergeFile(String mergeFileName) throws HyracksDataException {
+        return createFlushFile(mergeFileName);
     }
 
     @Override
     public LSMComponentFileReferences getRelFlushFileReference() throws HyracksDataException {
         String ts = getCurrentTimestamp();
         // Begin timestamp and end timestamp are identical since it is a flush
-        return new LSMComponentFileReferences(createFlushFile(baseDir + ts + SPLIT_STRING + ts, false), null, null);
+        return new LSMComponentFileReferences(createFlushFile(baseDir + ts + SPLIT_STRING + ts), null, null);
     }
 
     @Override
@@ -206,7 +206,7 @@
         String[] lastTimestampRange = lastFileName.split(SPLIT_STRING);
         // Get the range of timestamps by taking the earliest and the latest timestamps
         return new LSMComponentFileReferences(createMergeFile(baseDir + firstTimestampRange[0] + SPLIT_STRING
-                + lastTimestampRange[1], false), null, null);
+                + lastTimestampRange[1]), null, null);
     }
 
     @Override
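
After this change the helpers above take only a name: createFlushFile/createMergeFile always resolve absolute on-disk component paths, so the boolean that every LSM file manager used to thread through disappears. The sketch below is illustrative only and mimics the timestamped naming convention the callers build on; the SPLIT constant stands in for the real SPLIT_STRING, and the comments mirror the ones in the hunks.

    final class ComponentNameSketch {
        private static final String SPLIT = "_";  // stand-in for the real SPLIT_STRING constant

        // Begin timestamp and end timestamp are identical since it is a flush.
        static String flushBaseName(String baseDir, String ts) {
            return baseDir + ts + SPLIT + ts;
        }

        // A merged component spans the earliest and the latest timestamps.
        static String mergeBaseName(String baseDir, String firstTs, String lastTs) {
            return baseDir + firstTs + SPLIT + lastTs;
        }
    }
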
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
index 4478feb..9179923 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
@@ -139,8 +139,8 @@
             BTree deleteKeysBTree = BTreeUtils.createBTree(virtualBufferCache,
                     new VirtualMetaDataPageManager(virtualBufferCache.getNumPages()),
                     virtualBufferCache.getFileMapProvider(), invListTypeTraits, invListCmpFactories,
-                    BTreeLeafFrameType.REGULAR_NSM, ioManager.getFileRef(fileManager.getBaseDir() + "_virtual_del_" + i,
-                            false));
+                    BTreeLeafFrameType.REGULAR_NSM, ioManager.resolveAbsolutePath(fileManager.getBaseDir() + "_virtual_del_"
+                            + i));
             LSMInvertedIndexMemoryComponent mutableComponent = new LSMInvertedIndexMemoryComponent(memInvIndex,
                     deleteKeysBTree, virtualBufferCache, i == 0 ? true : false,
                     filterFactory == null ? null : filterFactory.createLSMComponentFilter());
@@ -804,7 +804,7 @@
             IVirtualMetaDataPageManager virtualFreePageManager, int id) throws IndexException, HyracksDataException {
         return InvertedIndexUtils.createInMemoryBTreeInvertedindex(virtualBufferCache, virtualFreePageManager,
                 invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory, ioManager
-                        .getFileRef(fileManager.getBaseDir() + "_virtual_vocab_" + id, false));
+                        .resolveAbsolutePath(fileManager.getBaseDir() + "_virtual_vocab_" + id));
     }
 
     protected LSMInvertedIndexDiskComponent createDiskInvIndexComponent(ILSMComponentFactory factory,
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
index 63772d2..97255f6 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
@@ -78,9 +78,9 @@
         String ts = getCurrentTimestamp();
         String baseName = baseDir + ts + SPLIT_STRING + ts;
         // Begin timestamp and end timestamp are identical since it is a flush
-        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + DICT_BTREE_SUFFIX, false),
-                createFlushFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX, false), createFlushFile(baseName
-                        + SPLIT_STRING + BLOOM_FILTER_STRING, false));
+        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + DICT_BTREE_SUFFIX),
+                createFlushFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX), createFlushFile(baseName
+                        + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
@@ -91,9 +91,9 @@
 
         String baseName = baseDir + firstTimestampRange[0] + SPLIT_STRING + lastTimestampRange[1];
         // Get the range of timestamps by taking the earliest and the latest timestamps
-        return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + DICT_BTREE_SUFFIX, false),
-                createMergeFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX, false), createMergeFile(baseName
-                        + SPLIT_STRING + BLOOM_FILTER_STRING, false));
+        return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + DICT_BTREE_SUFFIX),
+                createMergeFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX), createMergeFile(baseName
+                        + SPLIT_STRING + BLOOM_FILTER_STRING));
     }
 
     @Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
index 382d065..442d597 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
@@ -71,7 +71,7 @@
             IVirtualMetaDataPageManager virtualFreePageManager, int id) throws IndexException, HyracksDataException {
         return InvertedIndexUtils.createPartitionedInMemoryBTreeInvertedindex(virtualBufferCache,
                 virtualFreePageManager, invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories,
-                tokenizerFactory, ioManager.getFileRef(fileManager.getBaseDir() + "_virtual_vocab_" + id, false));
+                tokenizerFactory, ioManager.resolveAbsolutePath(fileManager.getBaseDir() + "_virtual_vocab_" + id));
     }
 
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexFactory.java
index ab852ed..c506ab7 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexFactory.java
@@ -57,7 +57,7 @@
     @Override
     public IInvertedIndex createIndexInstance(FileReference dictBTreeFile) throws IndexException, HyracksDataException {
         String invListsFilePath = fileNameMapper.getInvListsFilePath(dictBTreeFile.getFile().getPath());
-        FileReference invListsFile = ioManager.getFileRef(invListsFilePath, false);
+        FileReference invListsFile = ioManager.resolveAbsolutePath(invListsFilePath);
         IInvertedListBuilder invListBuilder = invListBuilderFactory.create();
         return new OnDiskInvertedIndex(bufferCache, fileMapProvider, invListBuilder, invListTypeTraits,
                 invListCmpFactories, tokenTypeTraits, tokenCmpFactories, dictBTreeFile, invListsFile);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexFactory.java
index 85adb45..4e572c1 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndexFactory.java
@@ -46,7 +46,7 @@
     @Override
     public IInvertedIndex createIndexInstance(FileReference dictBTreeFile) throws IndexException, HyracksDataException {
         String invListsFilePath = fileNameMapper.getInvListsFilePath(dictBTreeFile.getFile().getAbsolutePath());
-        FileReference invListsFile = ioManager.getFileRef(invListsFilePath, false);
+        FileReference invListsFile = ioManager.resolveAbsolutePath(invListsFilePath);
         IInvertedListBuilder invListBuilder = invListBuilderFactory.create();
         return new PartitionedOnDiskInvertedIndex(bufferCache, fileMapProvider, invListBuilder, invListTypeTraits,
                 invListCmpFactories, tokenTypeTraits, tokenCmpFactories, dictBTreeFile, invListsFile);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
index 23e69c8..36a51d3 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
@@ -109,7 +109,7 @@
 
     public static FileReference getBTreeFile(IIOManager ioManager, FileReference invListsFile)
             throws HyracksDataException {
-        return ioManager.getFileRef(invListsFile.getFile().getPath() + "_btree", false);
+        return ioManager.resolveAbsolutePath(invListsFile.getFile().getPath() + "_btree");
     }
 
     public static BTreeFactory createDeletedKeysBTreeFactory(IIOManager ioManager, IFileMapProvider diskFileMapProvider,
@@ -150,7 +150,7 @@
         BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, diskFileMapProvider,
                 bloomFilterKeyFields);
 
-        FileReference onDiskDirFileRef = ioManager.getFileRef(absoluteOnDiskDir, false);
+        FileReference onDiskDirFileRef = ioManager.resolveAbsolutePath(absoluteOnDiskDir);
         LSMInvertedIndexFileManager fileManager = new LSMInvertedIndexFileManager(ioManager, diskFileMapProvider,
                 onDiskDirFileRef, deletedKeysBTreeFactory);
 
@@ -202,7 +202,7 @@
         BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, diskFileMapProvider,
                 bloomFilterKeyFields);
 
-        FileReference onDiskDirFileRef = ioManager.getFileRef(absoluteOnDiskDir, false);
+        FileReference onDiskDirFileRef = ioManager.resolveAbsolutePath(absoluteOnDiskDir);
         LSMInvertedIndexFileManager fileManager = new LSMInvertedIndexFileManager(ioManager, diskFileMapProvider,
                 onDiskDirFileRef, deletedKeysBTreeFactory);
 
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
index 4eb3b03..64c9d17 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
@@ -102,11 +102,11 @@
             RTree memRTree = new RTree(virtualBufferCache, virtualBufferCache.getFileMapProvider(),
                     new VirtualMetaDataPageManager(virtualBufferCache.getNumPages()), rtreeInteriorFrameFactory,
                     rtreeLeafFrameFactory, rtreeCmpFactories, fieldCount,
-                    ioManager.getFileRef(fileManager.getBaseDir() + "_virtual_r_" + i, false), isPointMBR);
+                    ioManager.resolveAbsolutePath(fileManager.getBaseDir() + "_virtual_r_" + i), isPointMBR);
             BTree memBTree = new BTree(virtualBufferCache, virtualBufferCache.getFileMapProvider(),
                     new VirtualMetaDataPageManager(virtualBufferCache.getNumPages()), btreeInteriorFrameFactory,
                     btreeLeafFrameFactory, btreeCmpFactories, btreeCmpFactories.length,
-                    ioManager.getFileRef(fileManager.getBaseDir() + "_virtual_b_" + i, false));
+                    ioManager.resolveAbsolutePath(fileManager.getBaseDir() + "_virtual_b_" + i));
             LSMRTreeMemoryComponent mutableComponent = new LSMRTreeMemoryComponent(memRTree, memBTree,
                     virtualBufferCache, i == 0 ? true : false,
                     filterFactory == null ? null : filterFactory.createLSMComponentFilter());
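
The in-memory ("virtual") components above get per-component file names under the index base directory and are resolved as absolute paths. A sketch using the suffixes visible in the AbstractLSMRTree hunk; baseDir is assumed to already be an absolute index directory, and the signatures are assumptions taken from the call shapes in this patch.

    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.api.io.FileReference;
    import org.apache.hyracks.api.io.IIOManager;

    final class VirtualComponentFileSketch {
        // One RTree and one BTree file name per mutable component.
        static FileReference virtualRTree(IIOManager ioManager, String baseDir, int component)
                throws HyracksDataException {
            return ioManager.resolveAbsolutePath(baseDir + "_virtual_r_" + component);
        }

        static FileReference virtualBTree(IIOManager ioManager, String baseDir, int component)
                throws HyracksDataException {
            return ioManager.resolveAbsolutePath(baseDir + "_virtual_b_" + component);
        }
    }
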
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
index 43d3878..cb6c065 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
@@ -73,9 +73,9 @@
         String ts = getCurrentTimestamp();
         String baseName = baseDir + ts + SPLIT_STRING + ts;
         // Begin timestamp and end timestamp are identical since it is a flush
-        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + RTREE_STRING, false),
-                createFlushFile(baseName + SPLIT_STRING + BTREE_STRING, false), createFlushFile(baseName + SPLIT_STRING
-                        + BLOOM_FILTER_STRING, false));
+        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + RTREE_STRING),
+                createFlushFile(baseName + SPLIT_STRING + BTREE_STRING), createFlushFile(baseName + SPLIT_STRING
+                        + BLOOM_FILTER_STRING));
     }
 
     @Override
@@ -87,9 +87,9 @@
         String baseName = baseDir + firstTimestampRange[0] + SPLIT_STRING + lastTimestampRange[1];
         // Get the range of timestamps by taking the earliest and the latest
         // timestamps
-        return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + RTREE_STRING, false),
-                createMergeFile(baseName + SPLIT_STRING + BTREE_STRING, false), createMergeFile(baseName + SPLIT_STRING
-                        + BLOOM_FILTER_STRING, false));
+        return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + RTREE_STRING),
+                createMergeFile(baseName + SPLIT_STRING + BTREE_STRING), createMergeFile(baseName + SPLIT_STRING
+                        + BLOOM_FILTER_STRING));
     }
 
     @Override
@@ -205,9 +205,9 @@
         Files.createFile(Paths.get(baseDir + TRANSACTION_PREFIX + ts));
 
         String baseName = baseDir + ts + SPLIT_STRING + ts;
-        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + RTREE_STRING, false),
-                createFlushFile(baseName + SPLIT_STRING + BTREE_STRING, false), createFlushFile(baseName + SPLIT_STRING
-                        + BLOOM_FILTER_STRING, false));
+        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + RTREE_STRING),
+                createFlushFile(baseName + SPLIT_STRING + BTREE_STRING), createFlushFile(baseName + SPLIT_STRING
+                        + BLOOM_FILTER_STRING));
     }
 
     @Override
@@ -248,9 +248,9 @@
                 throw new HyracksDataException("unrecognized file found = " + fileName);
             }
         }
-        FileReference rTreeFileRef = ioManager.getFileRef(rTreeFile.getAbsolutePath(), false);
-        FileReference bTreeFileRef = ioManager.getFileRef(bTreeFile.getAbsolutePath(), false);
-        FileReference bloomFilterFileRef = ioManager.getFileRef(bloomFilterFile.getAbsolutePath(), false);
+        FileReference rTreeFileRef = ioManager.resolveAbsolutePath(rTreeFile.getAbsolutePath());
+        FileReference bTreeFileRef = ioManager.resolveAbsolutePath(bTreeFile.getAbsolutePath());
+        FileReference bloomFilterFileRef = ioManager.resolveAbsolutePath(bloomFilterFile.getAbsolutePath());
         return new LSMComponentFileReferences(rTreeFileRef, bTreeFileRef, bloomFilterFileRef);
     }
 }
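
The transaction-commit paths above scan the component directory and wrap files that already exist on disk; since java.io.File hands back absolute paths, they go through resolveAbsolutePath rather than device mapping. A sketch of that wrapping step, with the directory handling elided; only the call shape comes from the patch.

    import java.io.File;

    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.api.io.FileReference;
    import org.apache.hyracks.api.io.IIOManager;

    final class ExistingComponentSketch {
        // Files discovered on disk already carry absolute paths, so they are wrapped
        // directly instead of being re-mapped onto an IO device.
        static FileReference wrap(IIOManager ioManager, File onDiskFile) throws HyracksDataException {
            return ioManager.resolveAbsolutePath(onDiskFile.getAbsolutePath());
        }
    }
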
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
index cffb844..95c3448 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
@@ -27,7 +27,6 @@
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import org.apache.hyracks.dataflow.common.util.SerdeUtils;
@@ -52,7 +51,7 @@
 
     protected ArrayList<TestWorkloadConf> workloadConfs = getTestWorkloadConf();
 
-    protected abstract void setUp() throws HyracksException;
+    protected abstract void setUp() throws HyracksDataException;
 
     protected abstract void tearDown() throws HyracksDataException;
 
@@ -66,7 +65,7 @@
     protected abstract String getIndexTypeName();
 
     protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, int numThreads, TestWorkloadConf conf,
-            String dataMsg) throws InterruptedException, TreeIndexException, HyracksException {
+            String dataMsg) throws InterruptedException, TreeIndexException, HyracksDataException {
         setUp();
 
         if (LOGGER.isLoggable(Level.INFO)) {
@@ -104,7 +103,7 @@
     }
 
     @Test
-    public void oneIntKeyAndValue() throws InterruptedException, TreeIndexException, HyracksException {
+    public void oneIntKeyAndValue() throws InterruptedException, TreeIndexException, HyracksDataException {
         ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
                 IntegerSerializerDeserializer.INSTANCE };
         int numKeys = 1;
@@ -117,7 +116,7 @@
     }
 
     @Test
-    public void oneStringKeyAndValue() throws InterruptedException, TreeIndexException, HyracksException {
+    public void oneStringKeyAndValue() throws InterruptedException, TreeIndexException, HyracksDataException {
         ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
                 new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
         int numKeys = 1;
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
index 4fcd55c..27d7aa8 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
@@ -27,7 +27,6 @@
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.data.std.primitive.DoublePointable;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
 import org.apache.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
@@ -66,7 +65,7 @@
 
     protected ArrayList<TestWorkloadConf> workloadConfs = getTestWorkloadConf();
 
-    protected abstract void setUp() throws HyracksException;
+    protected abstract void setUp() throws HyracksDataException;
 
     protected abstract void tearDown() throws HyracksDataException;
 
@@ -83,7 +82,7 @@
 
     protected void runTest(ISerializerDeserializer[] fieldSerdes,
             IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType,
-            int numThreads, TestWorkloadConf conf, String dataMsg) throws HyracksException, InterruptedException,
+            int numThreads, TestWorkloadConf conf, String dataMsg) throws HyracksDataException, InterruptedException,
             TreeIndexException {
         setUp();
 
@@ -131,7 +130,7 @@
     }
 
     @Test
-    public void rtreeTwoDimensionsInt() throws InterruptedException, HyracksException, TreeIndexException {
+    public void rtreeTwoDimensionsInt() throws InterruptedException, HyracksDataException, TreeIndexException {
         ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
                 IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
                 IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
@@ -172,7 +171,7 @@
     }
 
     @Test
-    public void rtreeFourDimensionsDouble() throws InterruptedException, HyracksException, TreeIndexException {
+    public void rtreeFourDimensionsDouble() throws InterruptedException, HyracksDataException, TreeIndexException {
         ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
                 DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
                 DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
@@ -194,7 +193,7 @@
     }
 
     @Test
-    public void rstartreeTwoDimensionsInt() throws InterruptedException, HyracksException, TreeIndexException {
+    public void rstartreeTwoDimensionsInt() throws InterruptedException, HyracksDataException, TreeIndexException {
         if (!testRstarPolicy) {
             if (LOGGER.isLoggable(Level.INFO)) {
                 LOGGER.info("Ignoring RTree Multithread Test With Two Dimensions With Integer Keys.");
@@ -249,7 +248,7 @@
     }
 
     @Test
-    public void rstartreeFourDimensionsDouble() throws InterruptedException, HyracksException, TreeIndexException {
+    public void rstartreeFourDimensionsDouble() throws InterruptedException, HyracksDataException, TreeIndexException {
         if (!testRstarPolicy) {
             if (LOGGER.isLoggable(Level.INFO)) {
                 LOGGER.info("Ignoring RTree Multithread Test With Four Dimensions With Double Keys.");
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestJobletContext.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestJobletContext.java
index f2ac2bf..7ef33bd 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestJobletContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestJobletContext.java
@@ -44,7 +44,7 @@
         this.frameSize = frameSize;
         this.appContext = appContext;
         this.jobId = jobId;
-        fileFactory = new WorkspaceFileFactory(this, (IOManager) getIOManager());
+        fileFactory = new WorkspaceFileFactory(this, (IIOManager) getIOManager());
         this.frameManger = new FrameManager(frameSize);
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestNCApplicationContext.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestNCApplicationContext.java
index a26fac0..87739ea 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestNCApplicationContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestNCApplicationContext.java
@@ -36,7 +36,7 @@
 
 public class TestNCApplicationContext implements INCApplicationContext {
     private final ILifeCycleComponentManager lccm;
-    private final IOManager ioManager;
+    private final IIOManager ioManager;
     private final String nodeId;
 
     private Serializable distributedState;
@@ -44,7 +44,7 @@
 
     private final IMemoryManager mm;
 
-    public TestNCApplicationContext(IOManager ioManager, String nodeId) {
+    public TestNCApplicationContext(IIOManager ioManager, String nodeId) {
         this.lccm = new LifeCycleComponentManager();
         this.ioManager = ioManager;
         this.nodeId = nodeId;
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestStorageManagerComponentHolder.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestStorageManagerComponentHolder.java
index ee5f320..25308be 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestStorageManagerComponentHolder.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestStorageManagerComponentHolder.java
@@ -25,6 +25,7 @@
 import java.util.concurrent.ThreadFactory;
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.control.nc.io.IOManager;
@@ -100,9 +101,9 @@
         return fileMapProvider;
     }
 
-    public synchronized static IOManager getIOManager() throws HyracksException {
+    public synchronized static IOManager getIOManager() throws HyracksDataException {
         if (ioManager == null) {
-            List<IODeviceHandle> devices = new ArrayList<IODeviceHandle>();
+            List<IODeviceHandle> devices = new ArrayList<>();
             devices.add(new IODeviceHandle(new File(System.getProperty("java.io.tmpdir")), "iodev_test_wa"));
             ioManager = new IOManager(devices, Executors.newCachedThreadPool());
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestTaskContext.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestTaskContext.java
index aa0a7bd..efae786 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestTaskContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestTaskContext.java
@@ -48,7 +48,7 @@
     public TestTaskContext(TestJobletContext jobletContext, TaskAttemptId taskId) {
         this.jobletContext = jobletContext;
         this.taskId = taskId;
-        fileFactory = new WorkspaceFileFactory(this, (IOManager) getIOManager());
+        fileFactory = new WorkspaceFileFactory(this, (IIOManager) getIOManager());
     }
 
     @Override
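
The test contexts above now hold and pass the IIOManager interface instead of the concrete org.apache.hyracks.control.nc.io.IOManager, so WorkspaceFileFactory and the application context no longer depend on the NC implementation class. A sketch of the interface-typed field; the constructor shape is illustrative only.

    import org.apache.hyracks.api.io.IIOManager;

    final class IoManagerHolderSketch {
        // Depend on the IIOManager interface rather than the concrete IOManager.
        private final IIOManager ioManager;

        IoManagerHolderSketch(IIOManager ioManager) {
            this.ioManager = ioManager;
        }

        IIOManager getIOManager() {
            return ioManager;
        }
    }
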
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/util/BloomFilterTestHarness.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/util/BloomFilterTestHarness.java
index e7d493c..f718e38 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/util/BloomFilterTestHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/util/BloomFilterTestHarness.java
@@ -71,7 +71,7 @@
         ioManager = ctx.getIOManager();
         bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
         fileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        file = ioManager.getFileRef(0, simpleDateFormat.format(new Date()));
+        file = ioManager.getFileReference(0, simpleDateFormat.format(new Date()));
         rnd.setSeed(RANDOM_SEED);
     }
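
The test harnesses also switch from getFileRef(int, String) to getFileReference(int, String) for device-scoped files. The sketch below pins a device-relative name to IO device 0, as the harnesses above and below do; only the call shape is taken from the hunks, the rest is illustrative.

    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.api.io.FileReference;
    import org.apache.hyracks.api.io.IIOManager;

    final class DeviceScopedFileSketch {
        // Let the IO manager turn a device-relative name into a FileReference on IO device 0.
        static FileReference onDeviceZero(IIOManager ioManager, String name) throws HyracksDataException {
            return ioManager.getFileReference(0, name);
        }
    }
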
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/util/BTreeTestHarness.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/util/BTreeTestHarness.java
index dd6aca6..3022b7f 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/util/BTreeTestHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/util/BTreeTestHarness.java
@@ -73,7 +73,7 @@
         TestStorageManagerComponentHolder.init(pageSize, numPages, maxOpenFiles);
         bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
         fileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        file = ctx.getIOManager().getFileRef(0, simpleDateFormat.format(new Date()));
+        file = ctx.getIOManager().getFileReference(0, simpleDateFormat.format(new Date()));
         rnd.setSeed(RANDOM_SEED);
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeBulkLoadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeBulkLoadTest.java
index 726021b..d17b98e 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeBulkLoadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeBulkLoadTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.btree.OrderedIndexBulkLoadTest;
 import org.apache.hyracks.storage.am.btree.OrderedIndexTestContext;
 import org.apache.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
@@ -42,7 +41,7 @@
     private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeDeleteTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeDeleteTest.java
index 810ef4e..3438246 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeDeleteTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeDeleteTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.btree.OrderedIndexDeleteTest;
 import org.apache.hyracks.storage.am.btree.OrderedIndexTestContext;
 import org.apache.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
@@ -42,7 +41,7 @@
     private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
index 065b0bd..a6c34c8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
@@ -25,7 +25,6 @@
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -59,7 +58,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeInsertTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeInsertTest.java
index e258e04..1dd102b 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeInsertTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeInsertTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.btree.OrderedIndexInsertTest;
 import org.apache.hyracks.storage.am.btree.OrderedIndexTestContext;
 import org.apache.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
@@ -42,7 +41,7 @@
     private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeMergeTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeMergeTest.java
index 5b09da8..0a2806b 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeMergeTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.btree.OrderedIndexTestContext;
 import org.apache.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
 import org.apache.hyracks.storage.am.lsm.btree.util.LSMBTreeTestContext;
@@ -41,7 +40,7 @@
     private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeMultiBulkLoadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeMultiBulkLoadTest.java
index 7ba072c..41b3ad2 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeMultiBulkLoadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeMultiBulkLoadTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.btree.OrderedIndexBulkLoadTest;
 import org.apache.hyracks.storage.am.btree.OrderedIndexTestContext;
 import org.apache.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
@@ -43,7 +42,7 @@
     private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeUpdateTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeUpdateTest.java
index 3ddfe09..4e9693c 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeUpdateTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeUpdateTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.btree.OrderedIndexTestContext;
 import org.apache.hyracks.storage.am.btree.OrderedIndexUpdateTest;
 import org.apache.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
@@ -42,7 +41,7 @@
     private final LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
index 6a6755b..4c23275 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
@@ -24,7 +24,6 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.btree.OrderedIndexMultiThreadTest;
 import org.apache.hyracks.storage.am.common.IIndexTestWorkerFactory;
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
@@ -42,7 +41,7 @@
     private final LSMBTreeTestWorkerFactory workerFactory = new LSMBTreeTestWorkerFactory();
 
     @Override
-    protected void setUp() throws HyracksException {
+    protected void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java
index ef978ab..62c0c43 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java
@@ -21,7 +21,7 @@
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
-import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
 import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader;
 import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
@@ -32,7 +32,7 @@
     protected final float fillFactor;
 
     public BTreeBulkLoadRunner(int numBatches, int pageSize, int numPages, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] cmpFactories, float fillFactor) throws BTreeException, HyracksException {
+            IBinaryComparatorFactory[] cmpFactories, float fillFactor) throws BTreeException, HyracksDataException {
         super(numBatches, pageSize, numPages, typeTraits, cmpFactories);
         this.fillFactor = fillFactor;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java
index 0dbab03d..d28d874 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java
@@ -23,7 +23,6 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
 import org.apache.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
 import org.apache.hyracks.storage.am.btree.util.BTreeUtils;
@@ -36,7 +35,7 @@
     protected static final int HYRACKS_FRAME_SIZE = 128;
 
     public BTreeRunner(int numTuples, int pageSize, int numPages, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] cmpFactories) throws BTreeException, HyracksException {
+            IBinaryComparatorFactory[] cmpFactories) throws BTreeException, HyracksDataException {
         super(numTuples, pageSize, numPages, typeTraits, cmpFactories);
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java
index 606ec4e..0225843 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java
@@ -25,7 +25,6 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
@@ -60,12 +59,12 @@
     protected BTree btree;
 
     public InMemoryBTreeRunner(int numBatches, int pageSize, int numPages, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] cmpFactories) throws BTreeException, HyracksException {
+            IBinaryComparatorFactory[] cmpFactories) throws BTreeException, HyracksDataException {
         this.numBatches = numBatches;
         TestStorageManagerComponentHolder.init(pageSize, numPages, numPages);
         IIOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
         fileName = tmpDir + sep + simpleDateFormat.format(new Date());
-        file = ioManager.getFileRef(fileName, false);
+        file = ioManager.resolveAbsolutePath(fileName);
         init(pageSize, numPages, typeTraits, cmpFactories);
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
index 52d3b58..b683a92 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
@@ -29,7 +29,6 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.control.nc.io.IOManager;
 import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
@@ -85,7 +84,7 @@
 
     public LSMTreeRunner(int numBatches, int inMemPageSize, int inMemNumPages, int onDiskPageSize, int onDiskNumPages,
             ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories, int[] bloomFilterKeyFields,
-            double bloomFilterFalsePositiveRate) throws BTreeException, HyracksException {
+            double bloomFilterFalsePositiveRate) throws BTreeException, HyracksDataException {
         this.numBatches = numBatches;
 
         this.onDiskPageSize = onDiskPageSize;
@@ -97,7 +96,7 @@
         bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
         ioManager = TestStorageManagerComponentHolder.getIOManager();
         ioDeviceId = 0;
-        file = ioManager.getFileRef(onDiskDir, false);
+        file = ioManager.resolveAbsolutePath(onDiskDir);
         IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
 
         List<IVirtualBufferCache> virtualBufferCaches = new ArrayList<>();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
index f7387dc..9232ad5 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
@@ -30,7 +30,6 @@
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.control.nc.io.IOManager;
@@ -100,14 +99,14 @@
         this.numMutableComponents = AccessMethodTestsConfig.LSM_BTREE_NUM_MUTABLE_COMPONENTS;
     }
 
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         ioManager = TestStorageManagerComponentHolder.getIOManager();
         ioDeviceId = 0;
         onDiskDir = ioManager.getIODevices().get(ioDeviceId).getMount() + sep + "lsm_btree_"
                 + simpleDateFormat.format(new Date()) + sep;
         ctx = TestUtils.create(getHyracksFrameSize());
         TestStorageManagerComponentHolder.init(diskPageSize, diskNumPages, diskMaxOpenFiles);
-        file = ioManager.getFileRef(onDiskDir, false);
+        file = ioManager.resolveAbsolutePath(onDiskDir);
         diskBufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
         diskFileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
         virtualBufferCaches = new ArrayList<>();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
index 87374fe..c16577b 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
@@ -47,7 +47,7 @@
         String[] files = dir.list(filter);
         for (String fileName : files) {
             File file = new File(dir.getPath() + File.separator + fileName);
-            FileReference fileRef = ioManager.getFileRef(file.getAbsolutePath(), false);
+            FileReference fileRef = ioManager.resolveAbsolutePath(file.getAbsolutePath());
             allFiles.add(new ComparableFileName(fileRef));
         }
     }
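
For reference, the pattern that replaces the old ioManager.getFileRef(path, false) calls throughout these test files is a direct resolution of an already-absolute path. Below is a minimal sketch of the directory-listing case from the hunk above, assuming only the IIOManager.resolveAbsolutePath(String) method visible in this patch; the class and helper names are illustrative, not part of the change.

    import java.io.File;
    import java.io.FilenameFilter;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.api.io.FileReference;
    import org.apache.hyracks.api.io.IIOManager;

    public class AbsolutePathListingSketch {
        // Lists the files in dir that match the filter and resolves each one by its
        // absolute path, i.e. without mapping it onto any of the manager's IO devices.
        public static List<FileReference> listRefs(IIOManager ioManager, File dir, FilenameFilter filter)
                throws HyracksDataException {
            List<FileReference> refs = new ArrayList<>();
            String[] files = dir.list(filter);
            if (files == null) {
                return refs;
            }
            for (String fileName : files) {
                File file = new File(dir, fileName);
                refs.add(ioManager.resolveAbsolutePath(file.getAbsolutePath()));
            }
            return refs;
        }
    }
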
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
index 3d3053e..71b2c64 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
@@ -34,7 +34,6 @@
 import java.util.concurrent.Executors;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.control.nc.io.IOManager;
@@ -60,7 +59,7 @@
     protected FileReference file;
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         TestStorageManagerComponentHolder.init(DEFAULT_PAGE_SIZE, DEFAULT_NUM_PAGES, DEFAULT_MAX_OPEN_FILES);
         ioManager = TestStorageManagerComponentHolder.getIOManager();
         fileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(null);
@@ -68,7 +67,7 @@
                 + simpleDateFormat.format(new Date()) + sep;
         File f = new File(baseDir);
         f.mkdirs();
-        file = ioManager.getFileRef(f.getAbsolutePath(), false);
+        file = ioManager.resolveAbsolutePath(f.getAbsolutePath());
     }
 
     @After
@@ -123,7 +122,7 @@
                 + simpleDateFormat.format(new Date()) + sep;
         File f = new File(dirPath);
         f.mkdirs();
-        FileReference file = ioManager.getFileRef(f.getAbsolutePath(), false);
+        FileReference file = ioManager.resolveAbsolutePath(f.getAbsolutePath());
         ILSMIndexFileManager fileManager = new DummyLSMIndexFileManager(ioManager, fileMapProvider, file,
                 new DummyTreeFactory());
         fileManager.createDirs();
@@ -243,7 +242,7 @@
 
     }
 
-    private IOManager createIOManager(int numDevices) throws HyracksException {
+    private IOManager createIOManager(int numDevices) throws HyracksDataException {
         List<IODeviceHandle> devices = new ArrayList<>();
         for (int i = 0; i < numDevices; i++) {
             String iodevPath = System.getProperty("java.io.tmpdir") + sep + "test_iodev" + i;
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/VirtualBufferCacheTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/VirtualBufferCacheTest.java
index a7c541a..b0d0ecb 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/VirtualBufferCacheTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/VirtualBufferCacheTest.java
@@ -113,7 +113,7 @@
         for (int i = 0; i < NUM_FILES; i++) {
             FileState f = fileStates[i];
             String fName = String.format("f%d", i);
-            f.fileRef = ioManager.getFileRef(fName, true);
+            f.fileRef = ioManager.resolve(fName);
             vbc.createFile(f.fileRef);
             f.fileId = vbc.getFileMapProvider().lookupFileId(f.fileRef);
         }
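
VirtualBufferCacheTest is the one call site above that passes a bare relative name ("f0", "f1", ...), so the old getFileRef(fName, true) becomes resolve(fName), while the other call sites in these hunks resolve absolute paths. The sketch below contrasts the two calls, assuming both methods live on IIOManager as the surrounding hunks suggest; the wrapper class and method names are illustrative only.

    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.api.io.FileReference;
    import org.apache.hyracks.api.io.IIOManager;

    public final class FileResolutionSketch {

        private FileResolutionSketch() {
        }

        // Relative name: resolved against one of the IO manager's IO devices.
        public static FileReference deviceRelative(IIOManager ioManager, String relativePath)
                throws HyracksDataException {
            return ioManager.resolve(relativePath);
        }

        // Absolute path: taken as-is, not mapped onto an IO device.
        public static FileReference absolute(IIOManager ioManager, String absolutePath)
                throws HyracksDataException {
            return ioManager.resolveAbsolutePath(absolutePath);
        }
    }
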
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
index 1afe4ee..df4062f 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
@@ -23,11 +23,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.junit.After;
-import org.junit.Before;
-
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
@@ -38,6 +34,8 @@
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
+import org.junit.After;
+import org.junit.Before;
 
 public abstract class AbstractInvertedIndexTest {
     protected final Logger LOGGER = Logger.getLogger(AbstractInvertedIndexTest.class.getName());
@@ -61,7 +59,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/LSMInvertedIndexTestHarness.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/LSMInvertedIndexTestHarness.java
index 17b139e..2e608a8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/LSMInvertedIndexTestHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/LSMInvertedIndexTestHarness.java
@@ -29,7 +29,6 @@
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.control.nc.io.IOManager;
@@ -98,7 +97,7 @@
         this.numMutableComponents = AccessMethodTestsConfig.LSM_INVINDEX_NUM_MUTABLE_COMPONENTS;
     }
 
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         ioManager = TestStorageManagerComponentHolder.getIOManager();
         ioDeviceId = 0;
         onDiskDir = ioManager.getIODevices().get(ioDeviceId).getMount() + sep + "lsm_invertedindex_"
@@ -115,7 +114,7 @@
             virtualBufferCache.open();
         }
         rnd.setSeed(RANDOM_SEED);
-        invIndexFileRef = ioManager.getFileRef(onDiskDir + invIndexFileName, false);
+        invIndexFileRef = ioManager.resolveAbsolutePath(onDiskDir + invIndexFileName);
     }
 
     public void tearDown() throws HyracksDataException {
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
index b4616c3..88a8abd 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
@@ -24,10 +24,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.junit.Test;
-
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
 import org.apache.hyracks.storage.am.common.TestWorkloadConf;
 import org.apache.hyracks.storage.am.common.api.IndexException;
@@ -39,6 +36,7 @@
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
+import org.junit.Test;
 
 public class LSMInvertedIndexMultiThreadTest {
 
@@ -54,7 +52,7 @@
     protected final LSMInvertedIndexWorkerFactory workerFactory = new LSMInvertedIndexWorkerFactory();
     protected final ArrayList<TestWorkloadConf> workloadConfs = getTestWorkloadConf();
 
-    protected void setUp() throws HyracksException {
+    protected void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
@@ -63,7 +61,8 @@
     }
 
     protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numThreads,
-            TestWorkloadConf conf, String dataMsg) throws InterruptedException, TreeIndexException, HyracksException {
+            TestWorkloadConf conf, String dataMsg) throws InterruptedException, TreeIndexException,
+            HyracksDataException {
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("LSMInvertedIndex MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
                     + "; Workload: " + conf.toString() + ".");
@@ -85,7 +84,7 @@
     }
 
     protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
-        ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
+        ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<>();
 
         // Insert only workload.
         TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexLifecycleTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexLifecycleTest.java
index 42c2654..c0f4bc2 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexLifecycleTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexLifecycleTest.java
@@ -59,9 +59,9 @@
         IBinaryComparatorFactory[] invListCmpFactories = new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
                 .of(IntegerPointable.FACTORY) };
         IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
-        FileReference btreeFile = harness.getIOManager().getFileRef(harness.getInvListsFileRef().getFile()
+        FileReference btreeFile = harness.getIOManager().resolveAbsolutePath(harness.getInvListsFileRef().getFile()
                 .getAbsolutePath()
-                + "_btree", false);
+                + "_btree");
         index = new OnDiskInvertedIndex(harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), invListBuilder,
                 invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories,
                 harness.getInvListsFileRef(), btreeFile);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
index f236434..dd0c9d1 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
@@ -131,14 +131,14 @@
                 invIndex = InvertedIndexUtils.createInMemoryBTreeInvertedindex(harness.getVirtualBufferCaches().get(0),
                         new VirtualMetaDataPageManager(harness.getVirtualBufferCaches().get(0).getNumPages()),
                         invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
-                        ioManager.getFileRef(harness.getOnDiskDir(), false));
+                        ioManager.resolveAbsolutePath(harness.getOnDiskDir()));
                 break;
             }
             case PARTITIONED_INMEMORY: {
                 invIndex = InvertedIndexUtils.createPartitionedInMemoryBTreeInvertedindex(harness
                         .getVirtualBufferCaches().get(0), new VirtualMetaDataPageManager(harness.getVirtualBufferCaches()
                         .get(0).getNumPages()), invListTypeTraits, invListCmpFactories, tokenTypeTraits,
-                        tokenCmpFactories, tokenizerFactory, ioManager.getFileRef(harness.getOnDiskDir(), false));
+                        tokenCmpFactories, tokenizerFactory, ioManager.resolveAbsolutePath(harness.getOnDiskDir()));
                 break;
             }
             case ONDISK: {
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeBulkLoadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeBulkLoadTest.java
index 4d78dbe..beb3d8a 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeBulkLoadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeBulkLoadTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestContext;
@@ -44,7 +43,7 @@
     private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeDeleteTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeDeleteTest.java
index 401ad63..0adb0e6 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeDeleteTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeDeleteTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestContext;
@@ -44,7 +43,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
index ac5bc64..0703ea2 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
@@ -22,7 +22,6 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
 import org.apache.hyracks.storage.am.common.api.TreeIndexException;
@@ -57,7 +56,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeInsertTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeInsertTest.java
index 803278e..7dbcc1b 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeInsertTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeInsertTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestContext;
@@ -44,7 +43,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTest.java
index bf4d503..a8e2565 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestContext;
@@ -43,7 +42,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesBulkLoadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesBulkLoadTest.java
index 9ea9b62..f77c960 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesBulkLoadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesBulkLoadTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
@@ -44,7 +43,7 @@
     private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesDeleteTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesDeleteTest.java
index 08f828c..09db1fa 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesDeleteTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesDeleteTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
@@ -44,7 +43,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
index 1146351..e74ef54 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
@@ -22,7 +22,6 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
 import org.apache.hyracks.storage.am.common.api.TreeIndexException;
@@ -55,7 +54,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesInsertTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesInsertTest.java
index 3c0f779..c98088e 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesInsertTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesInsertTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
@@ -44,7 +43,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMergeTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMergeTest.java
index 0306393..7848edd8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMergeTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
@@ -43,7 +42,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
index b1b8070..373ce1e 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
@@ -24,7 +24,6 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.IIndexTestWorkerFactory;
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
 import org.apache.hyracks.storage.am.common.TestWorkloadConf;
@@ -49,7 +48,7 @@
     }
 
     @Override
-    protected void setUp() throws HyracksException {
+    protected void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
index 2693c70..1f2bada 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
@@ -24,7 +24,6 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.IIndexTestWorkerFactory;
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
 import org.apache.hyracks.storage.am.common.TestWorkloadConf;
@@ -49,7 +48,7 @@
     }
 
     @Override
-    protected void setUp() throws HyracksException {
+    protected void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
index c3e86ad..1d53f5a 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
@@ -30,7 +30,6 @@
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
 import org.apache.hyracks.control.nc.io.IOManager;
@@ -97,12 +96,12 @@
         this.numMutableComponents = AccessMethodTestsConfig.LSM_RTREE_NUM_MUTABLE_COMPONENTS;
     }
 
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         ioManager = TestStorageManagerComponentHolder.getIOManager();
         ioDeviceId = 0;
         onDiskDir = ioManager.getIODevices().get(ioDeviceId).getMount() + sep + "lsm_rtree_"
                 + simpleDateFormat.format(new Date()) + sep;
-        file = ioManager.getFileRef(onDiskDir, false);
+        file = ioManager.resolveAbsolutePath(onDiskDir);
         ctx = TestUtils.create(getHyracksFrameSize());
         TestStorageManagerComponentHolder.init(diskPageSize, diskNumPages, diskMaxOpenFiles);
         diskBufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeBulkLoadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeBulkLoadTest.java
index a491b25..00154d8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeBulkLoadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeBulkLoadTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -42,7 +41,7 @@
     private final RTreeTestHarness harness = new RTreeTestHarness();
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeDeleteTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeDeleteTest.java
index 7169c93..4e13ceb 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeDeleteTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeDeleteTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -42,7 +41,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeExamplesTest.java
index 9acea03..6fb5b5f 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeExamplesTest.java
@@ -22,7 +22,6 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
 import org.apache.hyracks.storage.am.common.api.TreeIndexException;
@@ -41,7 +40,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeInsertTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeInsertTest.java
index 599a4dc..4382603 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeInsertTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeInsertTest.java
@@ -23,7 +23,6 @@
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -42,7 +41,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
index 538834b..9ed65df 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
@@ -26,7 +26,7 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
-import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import org.apache.hyracks.data.std.primitive.IntegerPointable;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -71,7 +71,7 @@
 
     @Override
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         super.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
index 3919a97..e4b5bcd 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
@@ -24,7 +24,6 @@
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.storage.am.common.IIndexTestWorkerFactory;
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
 import org.apache.hyracks.storage.am.common.TestWorkloadConf;
@@ -49,7 +48,7 @@
     private final RTreeTestWorkerFactory workerFactory = new RTreeTestWorkerFactory();
 
     @Override
-    protected void setUp() throws HyracksException {
+    protected void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
index 7247cd1..1e49e8a 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
@@ -22,7 +22,6 @@
 import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.junit.After;
 import org.junit.Before;
 
@@ -39,7 +38,7 @@
     }
 
     @Before
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         harness.setUp();
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/RTreeTestHarness.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/RTreeTestHarness.java
index e84fc21..0d43ef6 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/RTreeTestHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/RTreeTestHarness.java
@@ -26,7 +26,6 @@
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
@@ -70,11 +69,11 @@
         this.hyracksFrameSize = hyracksFrameSize;
     }
 
-    public void setUp() throws HyracksException {
+    public void setUp() throws HyracksDataException {
         TestStorageManagerComponentHolder.init(pageSize, numPages, maxOpenFiles);
         IIOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
         fileName = tmpDir + sep + simpleDateFormat.format(new Date());
-        file = ioManager.getFileRef(fileName, false);
+        file = ioManager.resolveAbsolutePath(fileName);
         ctx = TestUtils.create(getHyracksFrameSize());
         bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
         fileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
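
The remaining mechanical change in these test hunks is narrowing setUp() and constructor signatures from throws HyracksException to throws HyracksDataException. Assuming HyracksDataException is a subclass of HyracksException, which is what the paired imports throughout these files suggest, callers written against the broader type still compile unchanged; a minimal illustration follows (the class and method names are hypothetical).

    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.api.exceptions.HyracksException;

    public class ThrowsNarrowingSketch {

        // Narrowed signature, as in the harnesses above.
        public void setUp() throws HyracksDataException {
            // resolve files, initialize buffer caches, ...
        }

        // A caller written against the old, broader signature still compiles,
        // assuming HyracksDataException extends HyracksException.
        public void runTest() throws HyracksException {
            setUp();
        }
    }
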
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheRegressionTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheRegressionTest.java
index c58d83c..7174a07 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheRegressionTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheRegressionTest.java
@@ -83,7 +83,7 @@
         IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
         IOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
 
-        FileReference firstFileRef = ioManager.getFileRef(fileName, false);
+        FileReference firstFileRef = ioManager.resolveAbsolutePath(fileName);
         bufferCache.createFile(firstFileRef);
         int firstFileId = fmp.lookupFileId(firstFileRef);
         bufferCache.openFile(firstFileId);
@@ -107,7 +107,7 @@
         }
 
         // Create a file with the same name.
-        FileReference secondFileRef = ioManager.getFileRef(fileName, false);
+        FileReference secondFileRef = ioManager.resolveAbsolutePath(fileName);
         bufferCache.createFile(secondFileRef);
         int secondFileId = fmp.lookupFileId(secondFileRef);
 
@@ -123,7 +123,7 @@
         // ask the BufferCache to pin the page, because it would return the same
         // physical memory again, and for performance reasons pages are never
         // reset with 0's.
-        FileReference testFileRef = ioManager.getFileRef(fileName, false);
+        FileReference testFileRef = ioManager.resolveAbsolutePath(fileName);
         IFileHandle testFileHandle = ioManager.open(testFileRef, FileReadWriteMode.READ_ONLY,
                 FileSyncMode.METADATA_SYNC_DATA_SYNC);
         ByteBuffer testBuffer = ByteBuffer.allocate(PAGE_SIZE + BufferCache.RESERVED_HEADER_BYTES);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheTest.java
index b96d114..3cf3186 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheTest.java
@@ -68,9 +68,9 @@
         TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
         IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
         IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        IOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
+        IIOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
         String fileName = getFileName();
-        FileReference file = ioManager.getFileRef(fileName, false);
+        FileReference file = ioManager.resolveAbsolutePath(fileName);
         bufferCache.createFile(file);
         int fileId = fmp.lookupFileId(file);
         int num = 10;
@@ -150,14 +150,14 @@
         TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
         IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
         IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        IOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
+        IIOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
 
         List<Integer> fileIds = new ArrayList<>();
 
         for (int i = 0; i < MAX_OPEN_FILES; i++) {
             String fileName = getFileName();
 
-            FileReference file = ioManager.getFileRef(fileName, false);
+            FileReference file = ioManager.resolveAbsolutePath(fileName);
             bufferCache.createFile(file);
             int fileId = fmp.lookupFileId(file);
             bufferCache.openFile(fileId);
@@ -169,7 +169,7 @@
         // since all files are open, next open should fail
         try {
             String fileName = getFileName();
-            FileReference file = ioManager.getFileRef(fileName, false);
+            FileReference file = ioManager.resolveAbsolutePath(fileName);
             bufferCache.createFile(file);
             int fileId = fmp.lookupFileId(file);
             bufferCache.openFile(fileId);
@@ -187,7 +187,7 @@
         exceptionThrown = false;
         try {
             String fileName = getFileName();
-            FileReference file = ioManager.getFileRef(fileName, false);
+            FileReference file = ioManager.resolveAbsolutePath(fileName);
             bufferCache.createFile(file);
             int fileId = fmp.lookupFileId(file);
             bufferCache.openFile(fileId);
@@ -220,7 +220,7 @@
         // open max number of files and write some stuff into their first page
         for (int i = 0; i < MAX_OPEN_FILES; i++) {
             String fileName = getFileName();
-            FileReference file = ioManager.getFileRef(fileName, false);
+            FileReference file = ioManager.resolveAbsolutePath(fileName);
             bufferCache.createFile(file);
             int fileId = fmp.lookupFileId(file);
             bufferCache.openFile(fileId);
@@ -248,7 +248,7 @@
         // since all files are open, next open should fail
         try {
             String fileName = getFileName();
-            FileReference file = ioManager.getFileRef(fileName, false);
+            FileReference file = ioManager.resolveAbsolutePath(fileName);
             bufferCache.createFile(file);
             int fileId = fmp.lookupFileId(file);
             bufferCache.openFile(fileId);
@@ -270,7 +270,7 @@
         // now open a few new files
         for (int i = 0; i < filesToClose; i++) {
             String fileName = getFileName();
-            FileReference file = ioManager.getFileRef(fileName, false);
+            FileReference file = ioManager.resolveAbsolutePath(fileName);
             bufferCache.createFile(file);
             int fileId = fmp.lookupFileId(file);
             bufferCache.openFile(fileId);
@@ -280,7 +280,7 @@
         // since all files are open, next open should fail
         try {
             String fileName = getFileName();
-            FileReference file = ioManager.getFileRef(fileName, false);
+            FileReference file = ioManager.resolveAbsolutePath(fileName);
             bufferCache.createFile(file);
             int fileId = fmp.lookupFileId(file);
             bufferCache.openFile(fileId);