[ASTERIXDB-1564][CONF] Consistently Use Log4j2

- user model changes: no
- storage format changes: no
- interface changes: no

Details:
- Replace java.util.logging with Log4j2 (migration pattern sketched below).
- Classes excluded from the conversion because of their tests:
  - IoUtil
  - Tracer
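- The conversion follows a uniform before/after pattern throughout the
  patch; a representative sketch (the class name Foo and the log messages
  are illustrative only):

    // before (java.util.logging)
    import java.util.logging.Level;
    import java.util.logging.Logger;

    private static final Logger LOGGER = Logger.getLogger(Foo.class.getName());

    if (LOGGER.isLoggable(Level.FINE)) {            // FINE -> DEBUG, FINEST -> TRACE
        LOGGER.fine("starting " + name);
    }
    LOGGER.log(Level.WARNING, "operation failed", e); // WARNING -> WARN, SEVERE -> ERROR

    // after (Log4j2)
    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    private static final Logger LOGGER = LogManager.getLogger(); // infers the class name

    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("starting " + name);
    }
    LOGGER.log(Level.WARN, "operation failed", e);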

Change-Id: Ic137571292f45de1f1994c61d328b97185012197
Reviewed-on: https://asterix-gerrit.ics.uci.edu/2226
Sonar-Qube: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Contrib: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Michael Blow <mblow@apache.org>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
diff --git a/hyracks-fullstack/hyracks/hyracks-api/pom.xml b/hyracks-fullstack/hyracks/hyracks-api/pom.xml
index 5e3ccf7..71bf5fc 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-api/pom.xml
@@ -104,5 +104,9 @@
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
index 0b2cc9b..cd6362f 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
@@ -24,8 +24,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hyracks.api.dataflow.ActivityId;
@@ -35,9 +33,11 @@
 import org.apache.hyracks.api.job.ActivityClusterGraph;
 import org.apache.hyracks.api.job.ActivityClusterId;
 import org.apache.hyracks.api.job.JobActivityGraph;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ActivityClusterGraphBuilder {
-    private static final Logger LOGGER = Logger.getLogger(ActivityClusterGraphBuilder.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     public ActivityClusterGraphBuilder() {
     }
@@ -146,8 +146,8 @@
         }
         acg.addActivityClusters(acList);
 
-        if (LOGGER.isLoggable(Level.FINE)) {
-                LOGGER.fine(acg.toJSON().asText());
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug(acg.toJSON().asText());
         }
         return acg;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
index 084626e..64bcf6e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
@@ -24,8 +24,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hyracks.api.dataflow.ActivityId;
@@ -37,9 +35,11 @@
 import org.apache.hyracks.api.job.JobActivityGraph;
 import org.apache.hyracks.api.job.JobFlag;
 import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class JobActivityGraphBuilder implements IActivityGraphBuilder {
-    private static final Logger LOGGER = Logger.getLogger(JobActivityGraphBuilder.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final Map<ActivityId, IOperatorDescriptor> activityOperatorMap;
 
@@ -71,8 +71,8 @@
 
     @Override
     public void addSourceEdge(int operatorInputIndex, IActivity task, int taskInputIndex) {
-        if (LOGGER.isLoggable(Level.FINEST)) {
-            LOGGER.finest("Adding source edge: " + task.getActivityId() + ":" + operatorInputIndex + " -> "
+        if (LOGGER.isTraceEnabled()) {
+            LOGGER.trace("Adding source edge: " + task.getActivityId() + ":" + operatorInputIndex + " -> "
                     + task.getActivityId() + ":" + taskInputIndex);
         }
         IOperatorDescriptor op = activityOperatorMap.get(task.getActivityId());
@@ -83,8 +83,8 @@
 
     @Override
     public void addTargetEdge(int operatorOutputIndex, IActivity task, int taskOutputIndex) {
-        if (LOGGER.isLoggable(Level.FINEST)) {
-            LOGGER.finest("Adding target edge: " + task.getActivityId() + ":" + operatorOutputIndex + " -> "
+        if (LOGGER.isTraceEnabled()) {
+            LOGGER.trace("Adding target edge: " + task.getActivityId() + ":" + operatorOutputIndex + " -> "
                     + task.getActivityId() + ":" + taskOutputIndex);
         }
         IOperatorDescriptor op = activityOperatorMap.get(task.getActivityId());
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
index ddcaab3..80ff77c 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
@@ -22,7 +22,8 @@
 import java.util.List;
 import java.util.Set;
 import java.util.function.Predicate;
-import java.util.logging.Level;
+
+import org.apache.logging.log4j.Level;
 
 /**
  * Accessor for the data contained in the global application configuration file.
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksDataException.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksDataException.java
index 7ae7cbf..58b4b27 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksDataException.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksDataException.java
@@ -20,10 +20,11 @@
 package org.apache.hyracks.api.exceptions;
 
 import java.io.Serializable;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.util.ErrorMessageUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 /**
  * The main execution time exception type for runtime errors in a hyracks environment
@@ -31,7 +32,7 @@
 public class HyracksDataException extends HyracksException {
 
     private static final long serialVersionUID = 1L;
-    private static final Logger LOGGER = Logger.getLogger(HyracksDataException.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     public static HyracksDataException create(Throwable cause) {
         if (cause instanceof HyracksDataException || cause == null) {
@@ -41,7 +42,7 @@
             throw (Error) cause;
         } else if (cause instanceof InterruptedException && !Thread.currentThread().isInterrupted()) {
             // TODO(mblow): why not force interrupt on current thread?
-            LOGGER.log(Level.WARNING,
+            LOGGER.log(Level.WARN,
                     "Wrapping an InterruptedException in HyracksDataException and current thread is not interrupted",
                     cause);
         }
@@ -66,7 +67,7 @@
             throw (Error) th;
         } else if (th instanceof InterruptedException && !Thread.currentThread().isInterrupted()) {
             // TODO(mblow): why not force interrupt on current thread?
-            LOGGER.log(Level.WARNING, "Suppressing an InterruptedException in a HyracksDataException and current "
+            LOGGER.log(Level.WARN, "Suppressing an InterruptedException in a HyracksDataException and current "
                     + "thread is not interrupted", th);
         }
         root.addSuppressed(th);
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java
index f5b4417..6d5d246 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java
@@ -25,10 +25,11 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.util.ExitUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class LifeCycleComponentManager implements ILifeCycleComponentManager {
 
@@ -36,7 +37,7 @@
         public static final String DUMP_PATH_KEY = "DUMP_PATH";
     }
 
-    private static final Logger LOGGER = Logger.getLogger(LifeCycleComponentManager.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final List<ILifeCycleComponent> components;
     private boolean stopInitiated;
@@ -54,7 +55,7 @@
     @Override
     public void uncaughtException(Thread t, Throwable e) {
         try {
-            LOGGER.log(Level.SEVERE, "Uncaught Exception from thread " + t.getName() + ". Calling shutdown hook", e);
+            LOGGER.log(Level.ERROR, "Uncaught Exception from thread " + t.getName() + ". Calling shutdown hook", e);
         } finally {
             ExitUtil.exit(99);
         }
@@ -79,7 +80,7 @@
             return;
         }
         stopped = true;
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Attempting to stop " + this);
         }
         if (stopInitiated) {
@@ -87,14 +88,14 @@
             return;
         }
         if (!configured) {
-            if (LOGGER.isLoggable(Level.SEVERE)) {
-                LOGGER.severe("Lifecycle management not configured " + this);
+            if (LOGGER.isErrorEnabled()) {
+                LOGGER.error("Lifecycle management not configured " + this);
             }
             return;
         }
 
         stopInitiated = true;
-        LOGGER.severe("Stopping instance");
+        LOGGER.error("Stopping instance");
 
         FileOutputStream componentDumpStream = null;
         String componentDumpPath = null;
@@ -110,13 +111,13 @@
                     }
                     componentDumpStream = new FileOutputStream(f);
                 }
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info("Stopping component instance " + component.getClass().getName() + "; dump state: "
                             + dumpState + ", dump path: " + componentDumpPath);
                 }
                 component.stop(dumpState, componentDumpStream);
             } catch (Exception e) {
-                LOGGER.log(Level.SEVERE, "Exception in stopping component " + component.getClass().getName(), e);
+                LOGGER.log(Level.ERROR, "Exception in stopping component " + component.getClass().getName(), e);
             } finally {
                 if (componentDumpStream != null) {
                     componentDumpStream.close();
@@ -132,11 +133,11 @@
         dumpPath = configuration.get(Config.DUMP_PATH_KEY);
         if (dumpPath == null) {
             dumpPath = System.getProperty("user.dir");
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("dump path not configured. Using current directory " + dumpPath);
+            if (LOGGER.isWarnEnabled()) {
+                LOGGER.warn("dump path not configured. Using current directory " + dumpPath);
             }
         }
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("LifecycleComponentManager configured " + this);
         }
         configured = true;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
index e9491f3..26ce2c1 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
@@ -26,12 +26,14 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ErrorMessageUtil {
 
-    private static final Logger LOGGER = Logger.getLogger(ErrorMessageUtil.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     public static final String NONE = "";
     private static final String COMMA = ",";
 
@@ -99,7 +101,7 @@
             return fmt.out().toString();
         } catch (Exception e) {
             // Do not throw further exceptions during exception processing.
-            LOGGER.log(Level.WARNING, e.getLocalizedMessage(), e);
+            LOGGER.log(Level.WARN, e.getLocalizedMessage(), e);
             return e.getMessage();
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SingleThreadEventProcessor.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SingleThreadEventProcessor.java
index 21965c7..9eb3b8e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SingleThreadEventProcessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SingleThreadEventProcessor.java
@@ -19,15 +19,16 @@
 package org.apache.hyracks.api.util;
 
 import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public abstract class SingleThreadEventProcessor<T> implements Runnable {
 
-    private static final Logger LOGGER = Logger.getLogger(SingleThreadEventProcessor.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final String name;
     private final LinkedBlockingQueue<T> eventInbox;
     private volatile Thread executorThread;
@@ -50,10 +51,10 @@
             } catch (InterruptedException e) {
                 Thread.currentThread().interrupt();
             } catch (Exception e) {
-                LOGGER.log(Level.SEVERE, "Error handling an event", e);
+                LOGGER.log(Level.ERROR, "Error handling an event", e);
             }
         }
-        LOGGER.log(Level.WARNING, "Stopped " + Thread.currentThread().getName());
+        LOGGER.log(Level.WARN, "Stopped " + Thread.currentThread().getName());
     }
 
     protected abstract void handle(T event) throws Exception; //NOSONAR
@@ -71,7 +72,7 @@
         int attempt = 0;
         while (executorThread.isAlive()) {
             attempt++;
-            LOGGER.log(Level.WARNING,
+            LOGGER.log(Level.WARN,
                     "Failed to stop event processor after " + attempt + " attempts. Interrupted exception swallowed?");
             if (attempt == 10) {
                 throw HyracksDataException.create(ErrorCode.FAILED_TO_SHUTDOWN_EVENT_PROCESSOR, name);
diff --git a/hyracks-fullstack/hyracks/hyracks-client/pom.xml b/hyracks-fullstack/hyracks/hyracks-client/pom.xml
index 81c9963..16039ab 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-client/pom.xml
@@ -109,5 +109,9 @@
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-databind</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java
index 31fd379..fc5708d 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java
@@ -25,8 +25,6 @@
 import java.nio.ByteBuffer;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.channels.IInputChannel;
 import org.apache.hyracks.api.comm.FrameHelper;
@@ -45,10 +43,13 @@
 import org.apache.hyracks.api.job.JobId;
 import org.apache.hyracks.client.net.ClientNetworkManager;
 import org.apache.hyracks.comm.channels.DatasetNetworkInputChannel;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 // TODO(madhusudancs): Should this implementation be moved to org.apache.hyracks.client?
 public class HyracksDatasetReader implements IHyracksDatasetReader {
-    private static final Logger LOGGER = Logger.getLogger(HyracksDatasetReader.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final IHyracksDatasetDirectoryServiceConnection datasetDirectoryServiceConnection;
 
@@ -94,10 +95,10 @@
             return datasetDirectoryServiceConnection.getDatasetResultStatus(jobId, resultSetId);
         } catch (HyracksDataException e) {
             if (e.getErrorCode() != ErrorCode.NO_RESULT_SET) {
-                LOGGER.log(Level.WARNING, "Exception retrieving result set for job " + jobId, e);
+                LOGGER.log(Level.WARN, "Exception retrieving result set for job " + jobId, e);
             }
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Exception retrieving result set for job " + jobId, e);
+            LOGGER.log(Level.WARN, "Exception retrieving result set for job " + jobId, e);
         }
         return null;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-comm/pom.xml b/hyracks-fullstack/hyracks/hyracks-comm/pom.xml
index 3ca803c..db36094 100644
--- a/hyracks-fullstack/hyracks/hyracks-comm/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-comm/pom.xml
@@ -51,5 +51,9 @@
       <artifactId>hyracks-net</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/DatasetNetworkInputChannel.java b/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/DatasetNetworkInputChannel.java
index e3c6f4a..c334389 100644
--- a/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/DatasetNetworkInputChannel.java
+++ b/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/DatasetNetworkInputChannel.java
@@ -22,8 +22,6 @@
 import java.nio.ByteBuffer;
 import java.util.ArrayDeque;
 import java.util.Queue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.channels.IInputChannel;
 import org.apache.hyracks.api.channels.IInputChannelMonitor;
@@ -34,9 +32,11 @@
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.job.JobId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class DatasetNetworkInputChannel implements IInputChannel {
-    private static final Logger LOGGER = Logger.getLogger(DatasetNetworkInputChannel.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     static final int INITIAL_MESSAGE_SIZE = 20;
 
@@ -114,8 +114,8 @@
         writeBuffer.putLong(resultSetId.getId());
         writeBuffer.putInt(partition);
         writeBuffer.flip();
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine("Sending partition request for JobId: " + jobId + " partition: " + partition + " on channel: "
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug("Sending partition request for JobId: " + jobId + " partition: " + partition + " on channel: "
                     + ccb);
         }
         ccb.getWriteInterface().getFullBufferAcceptor().accept(writeBuffer);
diff --git a/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/NetworkInputChannel.java b/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/NetworkInputChannel.java
index a846da3..0a9342a 100644
--- a/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/NetworkInputChannel.java
+++ b/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/NetworkInputChannel.java
@@ -22,8 +22,6 @@
 import java.nio.ByteBuffer;
 import java.util.ArrayDeque;
 import java.util.Queue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.channels.IInputChannel;
 import org.apache.hyracks.api.channels.IInputChannelMonitor;
@@ -33,9 +31,11 @@
 import org.apache.hyracks.api.context.IHyracksCommonContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.partitions.PartitionId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class NetworkInputChannel implements IInputChannel {
-    private static final Logger LOGGER = Logger.getLogger(NetworkInputChannel.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     static final int INITIAL_MESSAGE_SIZE = 20;
 
@@ -107,8 +107,8 @@
         writeBuffer.putInt(partitionId.getSenderIndex());
         writeBuffer.putInt(partitionId.getReceiverIndex());
         writeBuffer.flip();
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine("Sending partition request: " + partitionId + " on channel: " + ccb);
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug("Sending partition request: " + partitionId + " on channel: " + ccb);
         }
         ccb.getWriteInterface().getFullBufferAcceptor().accept(writeBuffer);
         ccb.getWriteInterface().getFullBufferAcceptor().close();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
index fce37dd..763c5ac 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
@@ -93,5 +93,13 @@
       <version>2.0.2-beta</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/BaseCCApplication.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/BaseCCApplication.java
index b2478a3..a7a64cc 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/BaseCCApplication.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/BaseCCApplication.java
@@ -19,8 +19,6 @@
 package org.apache.hyracks.control.cc;
 
 import java.util.Arrays;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.application.ICCApplication;
 import org.apache.hyracks.api.application.IServiceContext;
@@ -31,9 +29,13 @@
 import org.apache.hyracks.control.common.controllers.CCConfig;
 import org.apache.hyracks.control.common.controllers.ControllerConfig;
 import org.apache.hyracks.control.common.controllers.NCConfig;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.config.Configurator;
 
 public class BaseCCApplication implements ICCApplication {
-    private static final Logger LOGGER = Logger.getLogger(BaseCCApplication.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     public static final ICCApplication INSTANCE = new BaseCCApplication();
     private IConfigManager configManager;
 
@@ -83,7 +85,7 @@
 
     protected void configureLoggingLevel(Level level) {
         LOGGER.info("Setting Hyracks log level to " + level);
-        Logger.getLogger("org.apache.hyracks").setLevel(level);
+        Configurator.setLevel("org.apache.hyracks", level);
     }
 
     @Override
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java
index a78f6bb..a188594 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java
@@ -22,17 +22,18 @@
 
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.application.ICCApplication;
 import org.apache.hyracks.control.common.config.ConfigManager;
 import org.apache.hyracks.control.common.config.ConfigUtils;
 import org.apache.hyracks.control.common.controllers.CCConfig;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.kohsuke.args4j.CmdLineException;
 
 public class CCDriver {
-    private static final Logger LOGGER = Logger.getLogger(CCDriver.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private CCDriver() {
     }
@@ -49,10 +50,10 @@
                 Thread.sleep(100000);
             }
         } catch (CmdLineException e) {
-            LOGGER.log(Level.FINE, "Exception parsing command line: " + Arrays.toString(args), e);
+            LOGGER.log(Level.DEBUG, "Exception parsing command line: " + Arrays.toString(args), e);
             System.exit(2);
         } catch (Exception e) {
-            LOGGER.log(Level.SEVERE, "Exiting CCDriver due to exception", e);
+            LOGGER.log(Level.ERROR, "Exiting CCDriver due to exception", e);
             System.exit(1);
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java
index 4e3c0f5..ccf798a 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java
@@ -18,9 +18,6 @@
  */
 package org.apache.hyracks.control.cc;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.api.client.HyracksClientInterfaceFunctions;
 import org.apache.hyracks.api.comm.NetworkAddress;
 import org.apache.hyracks.api.dataset.DatasetJobRecord.Status;
@@ -48,10 +45,13 @@
 import org.apache.hyracks.ipc.api.IIPCHandle;
 import org.apache.hyracks.ipc.api.IIPCI;
 import org.apache.hyracks.ipc.exceptions.IPCException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 class ClientInterfaceIPCI implements IIPCI {
 
-    private static final Logger LOGGER = Logger.getLogger(ClientInterfaceIPCI.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final ClusterControllerService ccs;
     private final JobIdFactory jobIdFactory;
     private final DeployedJobSpecIdFactory deployedJobSpecIdFactory;
@@ -70,7 +70,7 @@
                 try {
                     handle.send(mid, ccs.getClusterControllerInfo(), null);
                 } catch (IPCException e) {
-                    LOGGER.log(Level.WARNING, "Error sending response to GET_CLUSTER_CONTROLLER_INFO request", e);
+                    LOGGER.log(Level.WARN, "Error sending response to GET_CLUSTER_CONTROLLER_INFO request", e);
                 }
                 break;
             case GET_JOB_STATUS:
@@ -146,7 +146,7 @@
                 try {
                     handle.send(mid, ccs.getCCContext().getClusterTopology(), null);
                 } catch (IPCException e) {
-                    LOGGER.log(Level.WARNING, "Error sending response to GET_CLUSTER_TOPOLOGY request", e);
+                    LOGGER.log(Level.WARN, "Error sending response to GET_CLUSTER_TOPOLOGY request", e);
                 }
                 break;
             case CLI_DEPLOY_BINARY:
@@ -184,7 +184,7 @@
                 try {
                     handle.send(mid, null, new IllegalArgumentException("Unknown function " + fn.getFunctionId()));
                 } catch (IPCException e) {
-                    LOGGER.log(Level.WARNING, "Error sending Unknown function response", e);
+                    LOGGER.log(Level.WARN, "Error sending Unknown function response", e);
                 }
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java
index 5a53fce..ad0cb61 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java
@@ -19,7 +19,6 @@
 package org.apache.hyracks.control.cc;
 
 import java.util.Map;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.client.NodeControllerInfo;
 import org.apache.hyracks.control.cc.work.ApplicationMessageWork;
@@ -48,9 +47,11 @@
 import org.apache.hyracks.control.common.work.IResultCallback;
 import org.apache.hyracks.ipc.api.IIPCHandle;
 import org.apache.hyracks.ipc.api.IIPCI;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 class ClusterControllerIPCI implements IIPCI {
-    private static final Logger LOGGER = Logger.getLogger(ClusterControllerIPCI.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final ClusterControllerService ccs;
 
     ClusterControllerIPCI(ClusterControllerService ccs) {
@@ -161,7 +162,7 @@
                         .schedule(new NotifyThreadDumpResponse(ccs, tdrf.getRequestId(), tdrf.getThreadDumpJSON()));
                 break;
             default:
-                LOGGER.warning("Unknown function: " + fn.getFunctionId());
+                LOGGER.warn("Unknown function: " + fn.getFunctionId());
         }
     }
 }
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
index fd53c25..360975d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
@@ -36,8 +36,6 @@
 import java.util.TreeMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hyracks.api.application.ICCApplication;
@@ -87,10 +85,13 @@
 import org.apache.hyracks.ipc.impl.IPCSystem;
 import org.apache.hyracks.ipc.impl.JavaSerializationBasedPayloadSerializerDeserializer;
 import org.apache.hyracks.util.ExitUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.xml.sax.InputSource;
 
 public class ClusterControllerService implements IControllerService {
-    private static final Logger LOGGER = Logger.getLogger(ClusterControllerService.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final CCConfig ccConfig;
 
@@ -243,8 +244,8 @@
             jobManager = (IJobManager) jobManagerConstructor.newInstance(ccConfig, this, jobCapacityController);
         } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException
                 | InvocationTargetException e) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.log(Level.WARNING, "class " + ccConfig.getJobManagerClass() + " could not be used: ", e);
+            if (LOGGER.isWarnEnabled()) {
+                LOGGER.log(Level.WARN, "class " + ccConfig.getJobManagerClass() + " could not be used: ", e);
             }
             // Falls back to the default implementation if the user-provided class name is not valid.
             jobManager = new JobManager(ccConfig, this, jobCapacityController);
@@ -278,12 +279,12 @@
             @Override
             public void notifyNodeJoin(String nodeId, Map<IOption, Object> ncConfiguration) throws HyracksException {
                 // no-op, we don't care
-                LOGGER.log(Level.WARNING, "Getting notified that node: " + nodeId + " has joined. and we don't care");
+                LOGGER.log(Level.WARN, "Getting notified that node: " + nodeId + " has joined. and we don't care");
             }
 
             @Override
             public void notifyNodeFailure(Collection<String> deadNodeIds) throws HyracksException {
-                LOGGER.log(Level.WARNING, "Getting notified that nodes: " + deadNodeIds + " has failed");
+                LOGGER.log(Level.WARN, "Getting notified that nodes: " + deadNodeIds + " has failed");
             }
         });
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
index 4928564..590a0f3 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
@@ -30,7 +30,6 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hyracks.api.client.NodeControllerInfo;
@@ -49,9 +48,11 @@
 import org.apache.hyracks.control.common.ipc.CCNCFunctions.AbortCCJobsFunction;
 import org.apache.hyracks.ipc.api.IIPCHandle;
 import org.apache.hyracks.ipc.exceptions.IPCException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class NodeManager implements INodeManager {
-    private static final Logger LOGGER = Logger.getLogger(NodeManager.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final ClusterControllerService ccs;
     private final CCConfig ccConfig;
@@ -89,13 +90,13 @@
 
     @Override
     public void addNode(String nodeId, NodeControllerState ncState) throws HyracksException {
-        LOGGER.warning("addNode(" + nodeId + ") called");
+        LOGGER.warn("addNode(" + nodeId + ") called");
         if (nodeId == null || ncState == null) {
             throw HyracksException.create(ErrorCode.INVALID_INPUT_PARAMETER);
         }
         // Updates the node registry.
         if (nodeRegistry.containsKey(nodeId)) {
-            LOGGER.warning(
+            LOGGER.warn(
                     "Node with name " + nodeId + " has already registered; failing the node then re-registering.");
             removeDeadNode(nodeId);
         } else {
@@ -106,7 +107,7 @@
                 throw HyracksDataException.create(e);
             }
         }
-        LOGGER.warning("adding node to registry");
+        LOGGER.warn("adding node to registry");
         nodeRegistry.put(nodeId, ncState);
         // Updates the IP address to node names map.
         try {
@@ -119,7 +120,7 @@
             throw e;
         }
         // Updates the cluster capacity.
-        LOGGER.warning("updating cluster capacity");
+        LOGGER.warn("updating cluster capacity");
         resourceManager.update(nodeId, ncState.getCapacity());
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/dataset/DatasetDirectoryService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/dataset/DatasetDirectoryService.java
index 1cb07d0..a57baf5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/dataset/DatasetDirectoryService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/dataset/DatasetDirectoryService.java
@@ -26,8 +26,6 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutorService;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.NetworkAddress;
 import org.apache.hyracks.api.dataset.DatasetDirectoryRecord;
@@ -44,6 +42,9 @@
 import org.apache.hyracks.api.job.JobStatus;
 import org.apache.hyracks.control.common.dataset.ResultStateSweeper;
 import org.apache.hyracks.control.common.work.IResultCallback;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 /**
  * TODO(madhusudancs): The potential perils of this global dataset directory service implementation is that, the jobs
@@ -54,7 +55,7 @@
  */
 public class DatasetDirectoryService implements IDatasetDirectoryService {
 
-    private static final Logger LOGGER = Logger.getLogger(DatasetDirectoryService.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final long resultTTL;
 
@@ -75,7 +76,7 @@
 
     @Override
     public synchronized void notifyJobCreation(JobId jobId, JobSpecification spec) throws HyracksException {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(getClass().getSimpleName() + " notified of new job " + jobId);
         }
         if (jobResultLocations.get(jobId) != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
index 04166a4..ea37cdd 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
@@ -27,8 +27,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression;
@@ -51,9 +49,11 @@
 import org.apache.hyracks.control.cc.job.Task;
 import org.apache.hyracks.control.cc.job.TaskCluster;
 import org.apache.hyracks.control.cc.job.TaskClusterId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 class ActivityClusterPlanner {
-    private static final Logger LOGGER = Logger.getLogger(ActivityClusterPlanner.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final JobExecutor executor;
 
@@ -74,7 +74,7 @@
 
         TaskCluster[] taskClusters = computeTaskClusters(ac, jobRun, activityPlanMap);
 
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Plan for " + ac);
             LOGGER.info("Built " + taskClusters.length + " Task Clusters");
             for (TaskCluster tc : taskClusters) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
index ab7a3db..ac06344 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
@@ -28,8 +28,6 @@
 import java.util.PriorityQueue;
 import java.util.Random;
 import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.NetworkAddress;
 import org.apache.hyracks.api.constraints.Constraint;
@@ -68,9 +66,12 @@
 import org.apache.hyracks.control.common.job.TaskAttemptDescriptor;
 import org.apache.hyracks.control.common.work.IResultCallback;
 import org.apache.hyracks.control.common.work.NoOpCallback;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class JobExecutor {
-    private static final Logger LOGGER = Logger.getLogger(JobExecutor.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final ClusterControllerService ccs;
 
@@ -196,7 +197,7 @@
         Set<TaskCluster> taskClusterRoots = new HashSet<>();
         findRunnableTaskClusterRoots(taskClusterRoots,
                 jobRun.getActivityClusterGraph().getActivityClusterMap().values());
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.log(Level.INFO,
                     "Runnable TC roots: " + taskClusterRoots + ", inProgressTaskClusters: " + inProgressTaskClusters);
         }
@@ -226,19 +227,19 @@
                 queue.add(new RankedRunnableTaskCluster(priority, tc));
             }
         }
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine("Ranked TCs: " + queue);
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug("Ranked TCs: " + queue);
         }
 
         Map<String, List<TaskAttemptDescriptor>> taskAttemptMap = new HashMap<>();
         for (RankedRunnableTaskCluster rrtc : queue) {
             TaskCluster tc = rrtc.getTaskCluster();
-            if (LOGGER.isLoggable(Level.FINE)) {
-                LOGGER.fine("Found runnable TC: " + tc);
+            if (LOGGER.isDebugEnabled()) {
+                LOGGER.debug("Found runnable TC: " + tc);
                 List<TaskClusterAttempt> attempts = tc.getAttempts();
-                LOGGER.fine("Attempts so far:" + attempts.size());
+                LOGGER.debug("Attempts so far:" + attempts.size());
                 for (TaskClusterAttempt tcAttempt : attempts) {
-                    LOGGER.fine("Status: " + tcAttempt.getStatus());
+                    LOGGER.debug("Status: " + tcAttempt.getStatus());
                 }
             }
             assignTaskLocations(tc, taskAttemptMap);
@@ -258,16 +259,16 @@
      * Runnability(Non-schedulable TaskCluster) = {NOT_RUNNABLE, _}
      */
     private Runnability assignRunnabilityRank(TaskCluster goal, Map<TaskCluster, Runnability> runnabilityMap) {
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine("Computing runnability: " + goal);
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug("Computing runnability: " + goal);
         }
         if (runnabilityMap.containsKey(goal)) {
             return runnabilityMap.get(goal);
         }
         TaskClusterAttempt lastAttempt = findLastTaskClusterAttempt(goal);
         if (lastAttempt != null) {
-            if (LOGGER.isLoggable(Level.FINE)) {
-                LOGGER.fine("Last Attempt Status: " + lastAttempt.getStatus());
+            if (LOGGER.isDebugEnabled()) {
+                LOGGER.debug("Last Attempt Status: " + lastAttempt.getStatus());
             }
             if (lastAttempt.getStatus() == TaskClusterAttempt.TaskClusterStatus.COMPLETED) {
                 Runnability runnability = new Runnability(Runnability.Tag.COMPLETED, Integer.MIN_VALUE);
@@ -284,15 +285,15 @@
         PartitionMatchMaker pmm = jobRun.getPartitionMatchMaker();
         Runnability aggregateRunnability = new Runnability(Runnability.Tag.RUNNABLE, 0);
         for (PartitionId pid : goal.getRequiredPartitions()) {
-            if (LOGGER.isLoggable(Level.FINE)) {
-                LOGGER.fine("Inspecting required partition: " + pid);
+            if (LOGGER.isDebugEnabled()) {
+                LOGGER.debug("Inspecting required partition: " + pid);
             }
             Runnability runnability;
             ConnectorDescriptorId cdId = pid.getConnectorDescriptorId();
             IConnectorPolicy cPolicy = connectorPolicyMap.get(cdId);
             PartitionState maxState = pmm.getMaximumAvailableState(pid);
-            if (LOGGER.isLoggable(Level.FINE)) {
-                LOGGER.fine("Policy: " + cPolicy + " maxState: " + maxState);
+            if (LOGGER.isDebugEnabled()) {
+                LOGGER.debug("Policy: " + cPolicy + " maxState: " + maxState);
             }
             if (PartitionState.COMMITTED.equals(maxState)) {
                 runnability = new Runnability(Runnability.Tag.RUNNABLE, 0);
@@ -328,8 +329,8 @@
                 // already not runnable -- cannot get better. bail.
                 break;
             }
-            if (LOGGER.isLoggable(Level.FINE)) {
-                LOGGER.fine("aggregateRunnability: " + aggregateRunnability);
+            if (LOGGER.isDebugEnabled()) {
+                LOGGER.debug("aggregateRunnability: " + aggregateRunnability);
             }
         }
         runnabilityMap.put(goal, aggregateRunnability);
@@ -511,8 +512,8 @@
                 if (node != null) {
                     node.getActiveJobIds().add(jobRun.getJobId());
                     boolean changed = jobRun.getParticipatingNodeIds().add(nodeId);
-                    if (LOGGER.isLoggable(Level.FINE)) {
-                        LOGGER.fine("Starting: " + taskDescriptors + " at " + entry.getKey());
+                    if (LOGGER.isDebugEnabled()) {
+                        LOGGER.debug("Starting: " + taskDescriptors + " at " + entry.getKey());
                     }
                     byte[] jagBytes = changed ? acgBytes : null;
                     node.getNodeController().startTasks(deploymentId, jobId, jagBytes, taskDescriptors,
@@ -537,14 +538,14 @@
 
     private void abortTaskCluster(TaskClusterAttempt tcAttempt,
             TaskClusterAttempt.TaskClusterStatus failedOrAbortedStatus) {
-        LOGGER.fine("Aborting task cluster: " + tcAttempt.getAttempt());
+        LOGGER.debug("Aborting task cluster: " + tcAttempt.getAttempt());
         Set<TaskAttemptId> abortTaskIds = new HashSet<>();
         Map<String, List<TaskAttemptId>> abortTaskAttemptMap = new HashMap<>();
         for (TaskAttempt ta : tcAttempt.getTaskAttempts().values()) {
             TaskAttemptId taId = ta.getTaskAttemptId();
             TaskAttempt.TaskStatus status = ta.getStatus();
             abortTaskIds.add(taId);
-            LOGGER.fine("Checking " + taId + ": " + ta.getStatus());
+            LOGGER.debug("Checking " + taId + ": " + ta.getStatus());
             if (status == TaskAttempt.TaskStatus.RUNNING || status == TaskAttempt.TaskStatus.COMPLETED) {
                 ta.setStatus(TaskAttempt.TaskStatus.ABORTED, null);
                 ta.setEndTime(System.currentTimeMillis());
@@ -564,13 +565,13 @@
         abortTaskAttemptMap.forEach((key, abortTaskAttempts) -> {
             final NodeControllerState node = nodeManager.getNodeControllerState(key);
             if (node != null) {
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info("Aborting: " + abortTaskAttempts + " at " + key);
                 }
                 try {
                     node.getNodeController().abortTasks(jobId, abortTaskAttempts);
                 } catch (Exception e) {
-                    LOGGER.log(Level.SEVERE, e.getMessage(), e);
+                    LOGGER.log(Level.ERROR, e.getMessage(), e);
                 }
             }
         });
@@ -648,13 +649,13 @@
             TaskCluster tc = ta.getTask().getTaskCluster();
             TaskClusterAttempt lastAttempt = findLastTaskClusterAttempt(tc);
             if (lastAttempt == null || taId.getAttempt() != lastAttempt.getAttempt()) {
-                LOGGER.warning(() -> "Ignoring task complete notification: " + taId + " -- Current last attempt = "
+                LOGGER.warn(() -> "Ignoring task complete notification: " + taId + " -- Current last attempt = "
                         + lastAttempt);
                 return;
             }
             TaskAttempt.TaskStatus taStatus = ta.getStatus();
             if (taStatus != TaskAttempt.TaskStatus.RUNNING) {
-                LOGGER.warning(() -> "Spurious task complete notification: " + taId + " Current state = " + taStatus);
+                LOGGER.warn(() -> "Spurious task complete notification: " + taId + " Current state = " + taStatus);
                 return;
             }
             ta.setStatus(TaskAttempt.TaskStatus.COMPLETED, null);
@@ -666,7 +667,7 @@
                 startRunnableActivityClusters();
             }
         } catch (Exception e) {
-            LOGGER.log(Level.SEVERE, e, () -> "Unexpected failure. Aborting job " + jobRun.getJobId());
+            LOGGER.error(() -> "Unexpected failure. Aborting job " + jobRun.getJobId(), e);
             abortJob(Collections.singletonList(e), NoOpCallback.INSTANCE);
         }
     }
@@ -701,7 +702,7 @@
                 LOGGER.log(Level.INFO, "We will try to start runnable activity clusters of " + ta.getTaskAttemptId());
                 startRunnableActivityClusters();
             } else {
-                LOGGER.warning(
+                LOGGER.warn(
                         "Ignoring task failure notification: " + taId + " -- Current last attempt = " + lastAttempt);
             }
         } catch (Exception e) {
@@ -729,7 +730,7 @@
                     ta -> HyracksException.create(ErrorCode.NODE_FAILED, ta.getNodeId()));
             startRunnableActivityClusters();
         } catch (Exception e) {
-            LOGGER.log(Level.SEVERE, e, () -> "Unexpected failure. Aborting job " + jobRun.getJobId());
+            LOGGER.error(() -> "Unexpected failure. Aborting job " + jobRun.getJobId(), e);
             abortJob(Collections.singletonList(e), NoOpCallback.INSTANCE);
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java
index 7f1100b..ffb72c9 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java
@@ -29,8 +29,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksException;
@@ -48,6 +46,9 @@
 import org.apache.hyracks.control.common.controllers.CCConfig;
 import org.apache.hyracks.control.common.work.IResultCallback;
 import org.apache.hyracks.control.common.work.NoOpCallback;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -55,7 +56,7 @@
 // Job manager manages all jobs that haven been submitted to the cluster.
 public class JobManager implements IJobManager {
 
-    private static final Logger LOGGER = Logger.getLogger(JobManager.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final ClusterControllerService ccs;
     private final Map<JobId, JobRun> activeRunMap;
@@ -74,8 +75,8 @@
             jobQueue = (IJobQueue) jobQueueConstructor.newInstance(this, this.jobCapacityController);
         } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException
                 | InvocationTargetException e) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.log(Level.WARNING, "class " + ccConfig.getJobQueueClass() + " could not be used: ", e);
+            if (LOGGER.isWarnEnabled()) {
+                LOGGER.log(Level.WARN, "class " + ccConfig.getJobQueueClass() + " could not be used: ", e);
             }
             // Falls back to the default implementation if the user-provided class name is not valid.
             jobQueue = new FIFOJobQueue(this, jobCapacityController);
@@ -158,7 +159,7 @@
             return;
         }
         if (run.getPendingStatus() != null) {
-            LOGGER.warning("Ignoring duplicate cleanup for JobRun with id: " + jobId);
+            LOGGER.warn("Ignoring duplicate cleanup for JobRun with id: " + jobId);
             return;
         }
         Set<String> targetNodes = run.getParticipatingNodeIds();
@@ -179,7 +180,7 @@
                         ncs.getNodeController().cleanUpJoblet(jobId, status);
                     }
                 } catch (Exception e) {
-                    LOGGER.log(Level.SEVERE, e.getMessage(), e);
+                    LOGGER.log(Level.ERROR, e.getMessage(), e);
                     if (caughtException == null) {
                         caughtException = HyracksException.create(e);
                     } else {
@@ -212,7 +213,7 @@
             try {
                 serviceCtx.notifyJobFinish(jobId, run.getPendingStatus(), run.getPendingExceptions());
             } catch (HyracksException e) {
-                LOGGER.log(Level.SEVERE, e.getMessage(), e);
+                LOGGER.log(Level.ERROR, e.getMessage(), e);
                 caughtException = e;
             }
         }
@@ -229,7 +230,7 @@
             try {
                 ccs.getJobLogFile().log(createJobLogObject(run));
             } catch (Exception e) {
-                LOGGER.log(Level.SEVERE, e.getMessage(), e);
+                LOGGER.log(Level.ERROR, e.getMessage(), e);
                 if (caughtException == null) {
                     caughtException = new HyracksException(e);
                 } else {
@@ -320,7 +321,7 @@
         try {
             run.getExecutor().startJob();
         } catch (Exception e) {
-            LOGGER.log(Level.SEVERE, "Aborting " + run.getJobId() + " due to failure during job start", e);
+            LOGGER.log(Level.ERROR, "Aborting " + run.getJobId() + " due to failure during job start", e);
             final List<Exception> exceptions = Collections.singletonList(e);
             // fail the job then abort it
             run.setStatus(JobStatus.FAILURE, exceptions);
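
The JobManager hunks above use the level translation applied consistently throughout this change: SEVERE becomes ERROR, WARNING becomes WARN, FINE becomes DEBUG, and FINER/FINEST become TRACE, with isLoggable(...) guards replaced by the per-level isXxxEnabled() checks. A minimal illustrative sketch of that mapping (the class name LevelMappingSketch is made up purely for illustration):

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    // Illustration only: how the java.util.logging calls translate to Log4j2.
    class LevelMappingSketch {
        private static final Logger LOGGER = LogManager.getLogger();

        void example(Exception e) {
            LOGGER.log(Level.ERROR, "was LOGGER.log(Level.SEVERE, ...)", e);
            LOGGER.warn("was LOGGER.warning(...)");
            if (LOGGER.isDebugEnabled()) {       // was LOGGER.isLoggable(Level.FINE)
                LOGGER.debug("was LOGGER.fine(...)");
            }
            LOGGER.trace("was LOGGER.finest(...)");
        }
    }
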
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java
index 3a5e3be..c5e51a6 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java
@@ -25,7 +25,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hyracks.api.dataflow.TaskAttemptId;
@@ -33,9 +32,11 @@
 import org.apache.hyracks.control.common.job.PartitionDescriptor;
 import org.apache.hyracks.control.common.job.PartitionRequest;
 import org.apache.hyracks.control.common.job.PartitionState;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class PartitionMatchMaker {
-    private static final Logger LOGGER = Logger.getLogger(PartitionMatchMaker.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final Map<PartitionId, List<PartitionDescriptor>> partitionDescriptors;
 
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/scheduler/FIFOJobQueue.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/scheduler/FIFOJobQueue.java
index da13091..2a6bdae 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/scheduler/FIFOJobQueue.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/scheduler/FIFOJobQueue.java
@@ -26,8 +26,6 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksException;
@@ -39,6 +37,9 @@
 import org.apache.hyracks.control.cc.job.JobRun;
 import org.apache.hyracks.util.annotations.NotThreadSafe;
 import org.apache.hyracks.util.annotations.ThreadSafetyGuaranteedBy;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 /**
  * An implementation of IJobQueue that gives more priority to jobs that are submitted earlier.
@@ -47,7 +48,7 @@
 @ThreadSafetyGuaranteedBy("JobManager")
 public class FIFOJobQueue implements IJobQueue {
 
-    private static final Logger LOGGER = Logger.getLogger(FIFOJobQueue.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final Map<JobId, JobRun> jobListMap = new LinkedHashMap<>();
     private final IJobManager jobManager;
@@ -104,7 +105,7 @@
                     // Fails the job.
                     jobManager.prepareComplete(run, JobStatus.FAILURE_BEFORE_EXECUTION, exceptions);
                 } catch (HyracksException e) {
-                    LOGGER.log(Level.SEVERE, e.getMessage(), e);
+                    LOGGER.log(Level.ERROR, e.getMessage(), e);
                 }
             }
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/ApplicationInstallationHandler.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/ApplicationInstallationHandler.java
index 2b0382b..f400978 100755
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/ApplicationInstallationHandler.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/ApplicationInstallationHandler.java
@@ -24,8 +24,6 @@
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
@@ -35,6 +33,9 @@
 import org.apache.hyracks.http.api.IServletResponse;
 import org.apache.hyracks.http.server.AbstractServlet;
 import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import io.netty.buffer.ByteBuf;
 import io.netty.handler.codec.http.HttpMethod;
@@ -42,7 +43,7 @@
 
 public class ApplicationInstallationHandler extends AbstractServlet {
 
-    private static final Logger LOGGER = Logger.getLogger(ApplicationInstallationHandler.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private ClusterControllerService ccs;
 
@@ -81,7 +82,7 @@
                 response.setStatus(HttpResponseStatus.METHOD_NOT_ALLOWED);
             }
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Unhandled exception ", e);
+            LOGGER.log(Level.WARN, "Unhandled exception ", e);
             response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestHandler.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestHandler.java
index b39915f..479004d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestHandler.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestHandler.java
@@ -20,13 +20,14 @@
 
 import java.io.IOException;
 import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.http.api.IServletRequest;
 import org.apache.hyracks.http.api.IServletResponse;
 import org.apache.hyracks.http.server.AbstractServlet;
 import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -35,7 +36,7 @@
 
 public class JSONOutputRequestHandler extends AbstractServlet {
 
-    private static final Logger LOGGER = Logger.getLogger(JSONOutputRequestHandler.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final IJSONOutputFunction fn;
 
     public JSONOutputRequestHandler(ConcurrentMap<String, Object> ctx, String[] paths, IJSONOutputFunction fn) {
@@ -63,7 +64,7 @@
         try {
             return fn.invoke(host, servletPath, parts);
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Exception invoking " + fn.getClass().getName(), e);
+            LOGGER.log(Level.WARN, "Exception invoking " + fn.getClass().getName(), e);
             response.setStatus(HttpResponseStatus.BAD_REQUEST);
             response.writer().print(e.getMessage());
         }
@@ -77,7 +78,7 @@
             om.writer().writeValue(response.writer(), result);
             response.setStatus(HttpResponseStatus.OK);
         } catch (IOException e) {
-            LOGGER.log(Level.WARNING, "Exception delivering result in " + getClass().getName(), e);
+            LOGGER.log(Level.WARN, "Exception delivering result in " + getClass().getName(), e);
             response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
             response.writer().print(e.getMessage());
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ApplicationMessageWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ApplicationMessageWork.java
index 341834c..392046d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ApplicationMessageWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ApplicationMessageWork.java
@@ -18,21 +18,21 @@
  */
 package org.apache.hyracks.control.cc.work;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.api.application.ICCServiceContext;
 import org.apache.hyracks.api.deployment.DeploymentId;
 import org.apache.hyracks.api.messages.IMessage;
 import org.apache.hyracks.control.cc.ClusterControllerService;
 import org.apache.hyracks.control.common.deployment.DeploymentUtils;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 /**
  * @author rico
  */
 public class ApplicationMessageWork extends AbstractHeartbeatWork {
 
-    private static final Logger LOGGER = Logger.getLogger(ApplicationMessageWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private byte[] message;
     private DeploymentId deploymentId;
     private String nodeId;
@@ -63,7 +63,7 @@
                 }
             });
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Error in stats reporting", e);
+            LOGGER.log(Level.WARN, "Error in stats reporting", e);
             throw new RuntimeException(e);
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java
index 613efad..b44c58c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java
@@ -20,8 +20,6 @@
 package org.apache.hyracks.control.cc.work;
 
 import java.util.Collection;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.control.cc.ClusterControllerService;
 import org.apache.hyracks.control.cc.NodeControllerState;
@@ -31,9 +29,12 @@
 import org.apache.hyracks.control.common.work.SynchronizableWork;
 import org.apache.hyracks.ipc.exceptions.IPCException;
 import org.apache.hyracks.util.ExitUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ClusterShutdownWork extends SynchronizableWork {
-    private static final Logger LOGGER = Logger.getLogger(ClusterShutdownWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final ClusterControllerService ccs;
     private final boolean terminateNCService;
@@ -76,7 +77,7 @@
                         /*
                          * best effort - just exit, user will have to kill misbehaving NCs
                          */
-                        LOGGER.severe("Clean shutdown of NCs timed out- giving up; unresponsive nodes: " +
+                        LOGGER.error("Clean shutdown of NCs timed out- giving up; unresponsive nodes: " +
                                 shutdownStatus.getRemainingNodes());
                     }
                     callback.setValue(cleanShutdown);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
index 407f9cd..e1b59e1 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
@@ -22,8 +22,6 @@
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.control.cc.ClusterControllerService;
 import org.apache.hyracks.control.cc.NodeControllerState;
@@ -31,9 +29,12 @@
 import org.apache.hyracks.util.ThreadDumpUtil;
 import org.apache.hyracks.control.common.work.AbstractWork;
 import org.apache.hyracks.control.common.work.IResultCallback;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class GetThreadDumpWork extends AbstractWork {
-    private static final Logger LOGGER = Logger.getLogger(GetThreadDumpWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     public static final int TIMEOUT_SECS = 60;
 
     private final ClusterControllerService ccs;
@@ -56,7 +57,7 @@
             try {
                 callback.setValue(ThreadDumpUtil.takeDumpJSONString());
             } catch (Exception e) {
-                LOGGER.log(Level.WARNING, "Exception taking CC thread dump", e);
+                LOGGER.log(Level.WARN, "Exception taking CC thread dump", e);
                 callback.setException(e);
             }
         } else {
@@ -82,7 +83,7 @@
                             Thread.sleep(sleepTime);
                         }
                         if (ccs.removeThreadDumpRun(run.getRequestId()) != null) {
-                            LOGGER.log(Level.WARNING, "Timed out thread dump request " + run.getRequestId()
+                            LOGGER.log(Level.WARN, "Timed out thread dump request " + run.getRequestId()
                                     + " for node " + nodeId);
                             callback.setException(new TimeoutException("Thread dump request for node " + nodeId
                                     + " timed out after " + TIMEOUT_SECS + " seconds."));
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobCleanupWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobCleanupWork.java
index bb85c13..f847cdb 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobCleanupWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobCleanupWork.java
@@ -20,8 +20,6 @@
 
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.job.JobId;
@@ -30,9 +28,11 @@
 import org.apache.hyracks.control.cc.job.JobRun;
 import org.apache.hyracks.control.common.work.AbstractWork;
 import org.apache.hyracks.control.common.work.IResultCallback;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class JobCleanupWork extends AbstractWork {
-    private static final Logger LOGGER = Logger.getLogger(JobCleanupWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private IJobManager jobManager;
     private JobId jobId;
@@ -51,7 +51,7 @@
 
     @Override
     public void run() {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Cleanup for JobRun with id: " + jobId);
         }
         try {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java
index 337e88f..b3b33c9 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java
@@ -21,8 +21,6 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.job.JobId;
@@ -32,9 +30,12 @@
 import org.apache.hyracks.control.cc.cluster.INodeManager;
 import org.apache.hyracks.control.cc.job.IJobManager;
 import org.apache.hyracks.control.cc.job.JobRun;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class JobletCleanupNotificationWork extends AbstractHeartbeatWork {
-    private static final Logger LOGGER = Logger.getLogger(JobletCleanupNotificationWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private ClusterControllerService ccs;
     private JobId jobId;
@@ -52,12 +53,12 @@
         IJobManager jobManager = ccs.getJobManager();
         final JobRun run = jobManager.get(jobId);
         if (run == null) {
-            LOGGER.log(Level.WARNING, () -> "ignoring unknown job " + jobId + " on notification from " + nodeId);
+            LOGGER.log(Level.WARN, () -> "ignoring unknown job " + jobId + " on notification from " + nodeId);
             return;
         }
         Set<String> cleanupPendingNodes = run.getCleanupPendingNodeIds();
         if (!cleanupPendingNodes.remove(nodeId)) {
-            LOGGER.log(Level.WARNING, () -> nodeId + " not in pending cleanup nodes set: " + cleanupPendingNodes +
+            LOGGER.log(Level.WARN, () -> nodeId + " not in pending cleanup nodes set: " + cleanupPendingNodes +
                     " for job " + jobId);
             return;
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NodeHeartbeatWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NodeHeartbeatWork.java
index 120f415..5c98035 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NodeHeartbeatWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NodeHeartbeatWork.java
@@ -18,10 +18,9 @@
  */
 package org.apache.hyracks.control.cc.work;
 
-import java.util.logging.Level;
-
 import org.apache.hyracks.control.cc.ClusterControllerService;
 import org.apache.hyracks.control.common.heartbeat.HeartbeatData;
+import org.apache.logging.log4j.Level;
 
 public class NodeHeartbeatWork extends AbstractHeartbeatWork {
 
@@ -36,6 +35,6 @@
 
     @Override
     public Level logLevel() {
-        return Level.FINEST;
+        return Level.TRACE;
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyShutdownWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyShutdownWork.java
index 83cbb91..d9f37fa 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyShutdownWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyShutdownWork.java
@@ -19,15 +19,15 @@
 
 package org.apache.hyracks.control.cc.work;
 
-import java.util.logging.Logger;
-
 import org.apache.hyracks.control.cc.ClusterControllerService;
 import org.apache.hyracks.control.common.shutdown.ShutdownRun;
 import org.apache.hyracks.control.common.work.SynchronizableWork;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class NotifyShutdownWork extends SynchronizableWork {
 
-    private static final Logger LOGGER = Logger.getLogger(NotifyShutdownWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final ClusterControllerService ccs;
     private final String nodeId;
 
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyThreadDumpResponse.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyThreadDumpResponse.java
index 2dae4b0..1cf443e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyThreadDumpResponse.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyThreadDumpResponse.java
@@ -18,13 +18,13 @@
  */
 package org.apache.hyracks.control.cc.work;
 
-import java.util.logging.Logger;
-
 import org.apache.hyracks.control.cc.ClusterControllerService;
 import org.apache.hyracks.control.common.work.AbstractWork;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class NotifyThreadDumpResponse extends AbstractWork {
-    private static final Logger LOGGER = Logger.getLogger(NotifyThreadDumpResponse.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final ClusterControllerService ccs;
 
@@ -39,10 +39,10 @@
 
     @Override
     public void run() {
-        LOGGER.fine("Delivering thread dump response: " + requestId);
+        LOGGER.debug("Delivering thread dump response: " + requestId);
         final GetThreadDumpWork.ThreadDumpRun threadDumpRun = ccs.removeThreadDumpRun(requestId);
         if (threadDumpRun == null) {
-            LOGGER.warning("Thread dump run " + requestId + " not found; discarding reply: " + threadDumpJSON);
+            LOGGER.warn("Thread dump run " + requestId + " not found; discarding reply: " + threadDumpJSON);
         } else {
             threadDumpRun.notifyThreadDumpReceived(threadDumpJSON);
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
index d1d2208..07b0f04 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
@@ -20,8 +20,6 @@
 
 import java.util.HashMap;
 import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.config.IApplicationConfig;
 import org.apache.hyracks.api.config.IOption;
@@ -34,9 +32,12 @@
 import org.apache.hyracks.control.common.ipc.NodeControllerRemoteProxy;
 import org.apache.hyracks.control.common.work.SynchronizableWork;
 import org.apache.hyracks.ipc.api.IIPCHandle;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class RegisterNodeWork extends SynchronizableWork {
-    private static final Logger LOGGER = Logger.getLogger(RegisterNodeWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final ClusterControllerService ccs;
     private final NodeRegistration reg;
@@ -53,7 +54,7 @@
         CCNCFunctions.NodeRegistrationResult result;
         Map<IOption, Object> ncConfiguration = new HashMap<>();
         try {
-            LOGGER.log(Level.WARNING, "Registering INodeController: id = " + id);
+            LOGGER.log(Level.WARN, "Registering INodeController: id = " + id);
             NodeControllerRemoteProxy nc =
                     new NodeControllerRemoteProxy(ccs.getClusterIPC(), reg.getNodeControllerAddress());
             NodeControllerState state = new NodeControllerState(nc, reg);
@@ -72,12 +73,12 @@
             result = new CCNCFunctions.NodeRegistrationResult(params, null);
             ccs.getJobIdFactory().ensureMinimumId(reg.getMaxJobId() + 1);
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Node registration failed", e);
+            LOGGER.log(Level.WARN, "Node registration failed", e);
             result = new CCNCFunctions.NodeRegistrationResult(null, e);
         }
-        LOGGER.warning("sending registration response to node");
+        LOGGER.warn("sending registration response to node");
         ncIPCHandle.send(-1, result, null);
-        LOGGER.warning("notifying node join");
+        LOGGER.warn("notifying node join");
         ccs.getContext().notifyNodeJoin(id, ncConfiguration);
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterResultPartitionLocationWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterResultPartitionLocationWork.java
index 7117b6f..ad36701 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterResultPartitionLocationWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterResultPartitionLocationWork.java
@@ -20,8 +20,6 @@
 
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.NetworkAddress;
 import org.apache.hyracks.api.dataset.ResultSetId;
@@ -31,10 +29,13 @@
 import org.apache.hyracks.control.cc.job.JobRun;
 import org.apache.hyracks.control.common.work.AbstractWork;
 import org.apache.hyracks.control.common.work.NoOpCallback;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class RegisterResultPartitionLocationWork extends AbstractWork {
 
-    private static final Logger LOGGER = Logger.getLogger(RegisterResultPartitionLocationWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final ClusterControllerService ccs;
 
@@ -70,7 +71,7 @@
             ccs.getDatasetDirectoryService().registerResultPartitionLocation(jobId, rsId, orderedResult, emptyResult,
                     partition, nPartitions, networkAddress);
         } catch (HyracksDataException e) {
-            LOGGER.log(Level.WARNING, "Failed to register partition location", e);
+            LOGGER.log(Level.WARN, "Failed to register partition location", e);
             // Should fail the job if exists on cc, otherwise, do nothing
             JobRun jobRun = ccs.getJobManager().get(jobId);
             if (jobRun != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RemoveDeadNodesWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RemoveDeadNodesWork.java
index a162708..ee10669 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RemoveDeadNodesWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RemoveDeadNodesWork.java
@@ -19,8 +19,6 @@
 package org.apache.hyracks.control.cc.work;
 
 import java.util.Collection;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hyracks.api.exceptions.HyracksException;
@@ -30,9 +28,12 @@
 import org.apache.hyracks.control.cc.job.IJobManager;
 import org.apache.hyracks.control.cc.job.JobRun;
 import org.apache.hyracks.control.common.work.AbstractWork;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class RemoveDeadNodesWork extends AbstractWork {
-    private static Logger LOGGER = Logger.getLogger(RemoveDeadNodesWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final ClusterControllerService ccs;
 
@@ -49,7 +50,7 @@
             Collection<JobId> affectedJobIds = result.getRight();
             int size = affectedJobIds.size();
             if (size > 0) {
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info("Number of affected jobs: " + size);
                 }
                 IJobManager jobManager = ccs.getJobManager();
@@ -64,12 +65,12 @@
                 ccs.getContext().notifyNodeFailure(deadNodes);
             }
         } catch (HyracksException e) {
-            LOGGER.log(Level.WARNING, "Uncaught exception on notifyNodeFailure", e);
+            LOGGER.log(Level.WARN, "Uncaught exception on notifyNodeFailure", e);
         }
     }
 
     @Override
     public Level logLevel() {
-        return Level.FINE;
+        return Level.DEBUG;
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ReportProfilesWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ReportProfilesWork.java
index 02806a0..e5e6f43 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ReportProfilesWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ReportProfilesWork.java
@@ -20,12 +20,12 @@
 package org.apache.hyracks.control.cc.work;
 
 import java.util.List;
-import java.util.logging.Level;
 
 import org.apache.hyracks.control.cc.job.IJobManager;
 import org.apache.hyracks.control.cc.job.JobRun;
 import org.apache.hyracks.control.common.job.profiling.om.JobProfile;
 import org.apache.hyracks.control.common.work.AbstractWork;
+import org.apache.logging.log4j.Level;
 
 public class ReportProfilesWork extends AbstractWork {
     private final IJobManager jobManager;
@@ -49,6 +49,6 @@
 
     @Override
     public Level logLevel() {
-        return Level.FINEST;
+        return Level.TRACE;
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java
index dfc22b1..aef331f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java
@@ -23,11 +23,12 @@
 import java.io.IOException;
 import java.io.ObjectOutputStream;
 import java.net.Socket;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.control.common.controllers.ServiceConstants.ServiceCommand;
 import org.apache.hyracks.control.common.work.SynchronizableWork;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 /**
  * A work which is run at CC shutdown for each NC specified in the configuration file.
@@ -35,7 +36,7 @@
  */
 public class ShutdownNCServiceWork extends SynchronizableWork {
 
-    private static final Logger LOGGER = Logger.getLogger(ShutdownNCServiceWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final String ncHost;
     private final int ncPort;
@@ -55,7 +56,7 @@
             oos.writeUTF(ServiceCommand.TERMINATE.name());
             oos.close();
         } catch (IOException e) {
-            LOGGER.log(Level.WARNING, "Failed to contact NC service '" + ncId + "' at " + ncHost + ":" + ncPort, e);
+            LOGGER.log(Level.WARN, "Failed to contact NC service '" + ncId + "' at " + ncHost + ":" + ncPort, e);
         }
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TaskFailureWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TaskFailureWork.java
index 8f50087..a2be15c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TaskFailureWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TaskFailureWork.java
@@ -19,8 +19,6 @@
 package org.apache.hyracks.control.cc.work;
 
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.TaskAttemptId;
 import org.apache.hyracks.api.job.JobId;
@@ -28,9 +26,12 @@
 import org.apache.hyracks.control.cc.job.IJobManager;
 import org.apache.hyracks.control.cc.job.JobRun;
 import org.apache.hyracks.control.cc.job.TaskAttempt;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class TaskFailureWork extends AbstractTaskLifecycleWork {
-    private static final Logger LOGGER = Logger.getLogger(TaskFailureWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final List<Exception> exceptions;
 
     public TaskFailureWork(ClusterControllerService ccs, JobId jobId, TaskAttemptId taId, String nodeId,
@@ -41,7 +42,7 @@
 
     @Override
     protected void performEvent(TaskAttempt ta) {
-        LOGGER.log(Level.WARNING, "Executing task failure work for " + this, exceptions.get(0));
+        LOGGER.log(Level.WARN, "Executing task failure work for " + this, exceptions.get(0));
         IJobManager jobManager = ccs.getJobManager();
         JobRun run = jobManager.get(jobId);
         ccs.getDatasetDirectoryService().reportJobFailure(jobId, exceptions);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
index ab526e8..2f80f5b 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
@@ -24,14 +24,15 @@
 import java.io.ObjectOutputStream;
 import java.io.StringWriter;
 import java.net.Socket;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.config.Section;
 import org.apache.hyracks.control.cc.ClusterControllerService;
 import org.apache.hyracks.control.common.controllers.NCConfig;
 import org.apache.hyracks.control.common.controllers.ServiceConstants.ServiceCommand;
 import org.apache.hyracks.control.common.work.AbstractWork;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.ini4j.Ini;
 
 /**
@@ -40,7 +41,7 @@
  */
 public class TriggerNCWork extends AbstractWork {
 
-    private static final Logger LOGGER = Logger.getLogger(TriggerNCWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final ClusterControllerService ccs;
     private final String ncHost;
@@ -67,7 +68,7 @@
                     return;
                     // QQQ Should probably have an ACK here
                 } catch (IOException e) {
-                    LOGGER.log(Level.WARNING, "Failed to contact NC service at " + ncHost + ":" + ncPort
+                    LOGGER.log(Level.WARN, "Failed to contact NC service at " + ncHost + ":" + ncPort
                             + "; will retry", e);
                 }
                 try {
@@ -93,8 +94,8 @@
         // entry point so that NCs can determine where all their config is.
         ccini.put(Section.LOCALNC.sectionName(), NCConfig.Option.NODE_ID.ini(), ncId);
         ccini.store(iniString);
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine("Returning Ini file:\n" + iniString.toString());
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug("Returning Ini file:\n" + iniString.toString());
         }
         return iniString.toString();
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
index e020ef2..0f12936 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
@@ -79,5 +79,9 @@
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-lang3</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
index 142afbf..7d3ada5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
@@ -40,8 +40,6 @@
 import java.util.function.Function;
 import java.util.function.Predicate;
 import java.util.function.Supplier;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
@@ -54,6 +52,9 @@
 import org.apache.hyracks.api.config.Section;
 import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.control.common.application.ConfigManagerApplicationConfig;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.ini4j.Ini;
 import org.ini4j.Profile;
 import org.kohsuke.args4j.CmdLineException;
@@ -64,7 +65,7 @@
 public class ConfigManager implements IConfigManager, Serializable {
 
     private static final long serialVersionUID = 1L;
-    private static final Logger LOGGER = Logger.getLogger(ConfigManager.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private HashSet<IOption> registeredOptions = new HashSet<>();
     private HashMap<IOption, Object> definedMap = new HashMap<>();
@@ -143,7 +144,7 @@
             if (configured) {
                 throw new IllegalStateException("configuration already processed");
             }
-            LOGGER.fine("registering option: " + option.toIniString());
+            LOGGER.debug("registering option: " + option.toIniString());
             Map<String, IOption> optionMap = sectionMap.computeIfAbsent(option.section(), section -> new HashMap<>());
             IOption prev = optionMap.put(option.ini(), option);
             if (prev != null) {
@@ -157,9 +158,9 @@
                         (node, value,
                                 isDefault) -> correctedMap(option.section() == Section.NC ? node : null, isDefault)
                                         .put(option, value));
-                if (LOGGER.isLoggable(Level.FINE)) {
+                if (LOGGER.isDebugEnabled()) {
                     optionSetters.put(option, (node, value, isDefault) -> LOGGER
-                            .fine((isDefault ? "defaulting" : "setting ") + option.toIniString() + " to " + value));
+                            .debug((isDefault ? "defaulting" : "setting ") + option.toIniString() + " to " + value));
                 }
             }
         }
@@ -171,12 +172,12 @@
     }
 
     public void ensureNode(String nodeId) {
-        LOGGER.fine("ensureNode: " + nodeId);
+        LOGGER.debug("ensureNode: " + nodeId);
         nodeSpecificMap.computeIfAbsent(nodeId, this::createNodeSpecificMap);
     }
 
     private Map<IOption, Object> createNodeSpecificMap(String nodeId) {
-        LOGGER.fine("createNodeSpecificMap: " + nodeId);
+        LOGGER.debug("createNodeSpecificMap: " + nodeId);
         return Collections.synchronizedMap(new HashMap<>());
     }
 
@@ -259,7 +260,7 @@
             cmdLineParser.addArgument(new Args4jSetter(o -> appArgs.add(String.valueOf(o)), true, String.class),
                     new Args4jArgument());
         }
-        LOGGER.fine("parsing cmdline: " + Arrays.toString(args));
+        LOGGER.debug("parsing cmdline: " + Arrays.toString(args));
         if (args == null || args.length == 0) {
             LOGGER.info("no command line args supplied");
             return appArgs;
@@ -271,7 +272,7 @@
                 ConfigUtils.printUsage(e, usageFilter, System.err);
                 throw e;
             } else {
-                LOGGER.log(Level.FINE, "Ignoring parse exception due to -help", e);
+                LOGGER.log(Level.DEBUG, "Ignoring parse exception due to -help", e);
             }
         }
         if (bean.help) {
@@ -325,7 +326,7 @@
                     return;
                 }
                 final String value = iniOption.getValue();
-                LOGGER.fine("setting " + option.toIniString() + " to " + value);
+                LOGGER.debug("setting " + option.toIniString() + " to " + value);
                 final Object parsed = option.type().parse(value);
                 invokeSetters(option, parsed, node);
             }
@@ -348,7 +349,7 @@
     }
 
     private void applyDefaults() {
-        LOGGER.fine("applying defaults");
+        LOGGER.debug("applying defaults");
         sectionMap.forEach((key, value) -> {
             if (key == Section.NC) {
                 value.values().forEach(option -> getNodeNames()
@@ -487,7 +488,7 @@
             throw new IllegalStateException("Option not registered with ConfigManager: " + option.toIniString() + "("
                     + option.getClass() + "." + option + ")");
         } else if (option.section() == Section.NC) {
-            LOGGER.warning("NC option " + option.toIniString() + " being accessed outside of NC-scoped configuration.");
+            LOGGER.warn("NC option " + option.toIniString() + " being accessed outside of NC-scoped configuration.");
         }
         return getOrDefault(configurationMap, option, null);
     }
@@ -511,7 +512,7 @@
         if (description != null && !"".equals(description)) {
             usage.append(description).append(" ");
         } else {
-            LOGGER.warning("missing description for option: "
+            LOGGER.warn("missing description for option: "
                     + option.getClass().getName().substring(option.getClass().getName().lastIndexOf(".") + 1) + "."
                     + option.name());
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
index 1e92a7a..62e6ee0 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
@@ -19,11 +19,11 @@
 package org.apache.hyracks.control.common.config;
 
 import java.net.MalformedURLException;
-import java.util.logging.Level;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hyracks.api.config.IOptionType;
 import org.apache.hyracks.util.StorageUtil;
+import org.apache.logging.log4j.Level;
 
 import com.fasterxml.jackson.databind.node.ObjectNode;
 
@@ -169,7 +169,14 @@
     public static final IOptionType<Level> LEVEL = new IOptionType<Level>() {
         @Override
         public Level parse(String s) {
-            return s == null ? null : Level.parse(s);
+            if (s == null) {
+                throw new IllegalArgumentException("Logging level cannot be null");
+            }
+            final Level level = Level.getLevel(s);
+            if (level == null) {
+                throw new IllegalArgumentException("Unrecognized logging level: " + s);
+            }
+            return level;
         }
 
         @Override
@@ -179,12 +186,12 @@
 
         @Override
         public String serializeToJSON(Object value) {
-            return value == null ? null : ((Level)value).getName();
+            return value == null ? null : ((Level)value).name();
         }
 
         @Override
         public String serializeToIni(Object value) {
-            return ((Level)value).getName();
+            return ((Level)value).name();
         }
 
         @Override
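
With the parse logic above, the LEVEL option type now fails fast on a value that Log4j2 does not recognize instead of passing it through: Level.getLevel(...) returns null for names that are not registered, and the new code turns both a null input and a null lookup result into an IllegalArgumentException. A small usage sketch of the same logic outside of OptionTypes (the class name and sample strings are illustrative only):

    import org.apache.logging.log4j.Level;

    class LevelParseSketch {
        // Mirrors the behavior of the new OptionTypes.LEVEL.parse shown above.
        static Level parse(String s) {
            if (s == null) {
                throw new IllegalArgumentException("Logging level cannot be null");
            }
            final Level level = Level.getLevel(s);
            if (level == null) {
                throw new IllegalArgumentException("Unrecognized logging level: " + s);
            }
            return level;
        }

        public static void main(String[] args) {
            System.out.println(parse("WARN").name());  // prints WARN
            // parse("WARNING") would throw here: WARNING is a java.util.logging
            // name, not a standard Log4j2 level (unless a custom level with that
            // name has been registered).
        }
    }
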
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/dataset/ResultStateSweeper.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/dataset/ResultStateSweeper.java
index da1714b..a9ca771 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/dataset/ResultStateSweeper.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/dataset/ResultStateSweeper.java
@@ -21,11 +21,11 @@
 
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataset.IDatasetManager;
 import org.apache.hyracks.api.job.JobId;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Logger;
 
 /**
  * Sweeper to clean up the stale result distribution files and result states.
@@ -59,7 +59,7 @@
                 Thread.sleep(resultSweepThreshold);
                 sweep();
             } catch (InterruptedException e) {
-                logger.log(Level.WARNING, "Result cleaner thread interrupted, shutting down.");
+                logger.log(Level.WARN, "Result cleaner thread interrupted, shutting down.");
                 break; // the interrupt was explicit from another thread. This thread should shut down...
             }
         }
@@ -78,8 +78,8 @@
                 datasetManager.deinitState(jobId);
             }
         }
-        if (logger.isLoggable(Level.FINER)) {
-            logger.finer("Result state cleanup instance successfully completed.");
+        if (logger.isTraceEnabled()) {
+            logger.trace("Result state cleanup instance successfully completed.");
         }
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/CCNCFunctions.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/CCNCFunctions.java
index 77c352e..dca8c07 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/CCNCFunctions.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/CCNCFunctions.java
@@ -35,8 +35,6 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.client.NodeControllerInfo;
 import org.apache.hyracks.api.comm.NetworkAddress;
@@ -66,9 +64,12 @@
 import org.apache.hyracks.control.common.job.profiling.om.TaskProfile;
 import org.apache.hyracks.ipc.api.IPayloadSerializerDeserializer;
 import org.apache.hyracks.ipc.impl.JavaSerializationBasedPayloadSerializerDeserializer;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class CCNCFunctions {
-    private static final Logger LOGGER = Logger.getLogger(CCNCFunctions.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private static final int FID_CODE_SIZE = 1;
 
@@ -1350,7 +1351,7 @@
             try {
                 serialize(baos, object, fid);
             } catch (Exception e) {
-                LOGGER.log(Level.SEVERE, "Error serializing " + object, e);
+                LOGGER.log(Level.ERROR, "Error serializing " + object, e);
                 throw e;
             }
             baos.close();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
index f9af4c63..447d678 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
@@ -20,7 +20,6 @@
 
 import java.net.InetSocketAddress;
 import java.util.List;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.NetworkAddress;
 import org.apache.hyracks.api.dataflow.TaskAttemptId;
@@ -55,9 +54,11 @@
 import org.apache.hyracks.control.common.job.profiling.om.JobProfile;
 import org.apache.hyracks.control.common.job.profiling.om.TaskProfile;
 import org.apache.hyracks.ipc.impl.IPCSystem;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ClusterControllerRemoteProxy extends ControllerRemoteProxy implements IClusterController {
-    private static final Logger LOGGER = Logger.getLogger(ClusterControllerRemoteProxy.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final int clusterConnectRetries;
 
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ControllerRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ControllerRemoteProxy.java
index 83972d5..fe9e85a 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ControllerRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ControllerRemoteProxy.java
@@ -19,12 +19,12 @@
 package org.apache.hyracks.control.common.ipc;
 
 import java.net.InetSocketAddress;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.ipc.api.IIPCHandle;
 import org.apache.hyracks.ipc.exceptions.IPCException;
 import org.apache.hyracks.ipc.impl.IPCSystem;
+import org.apache.logging.log4j.Logger;
 
 public abstract class ControllerRemoteProxy {
     protected final IPCSystem ipc;
@@ -55,14 +55,14 @@
         try {
             final boolean first = ipcHandle == null;
             if (!first) {
-                getLogger().warning("ipcHandle " + ipcHandle + " disconnected; retrying connection");
+                getLogger().warn("ipcHandle " + ipcHandle + " disconnected; retrying connection");
                 eventListener.ipcHandleDisconnected(ipcHandle);
             }
             ipcHandle = ipc.getHandle(inetSocketAddress, maxRetries);
             if (first) {
                 eventListener.ipcHandleConnected(ipcHandle);
             } else {
-                getLogger().warning("ipcHandle " + ipcHandle + " restored");
+                getLogger().warn("ipcHandle " + ipcHandle + " restored");
                 eventListener.ipcHandleRestored(ipcHandle);
             }
         } catch (IPCException e) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
index 8431eca..b4aaf45 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
@@ -23,7 +23,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.NetworkAddress;
 import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
@@ -50,9 +49,11 @@
 import org.apache.hyracks.control.common.ipc.CCNCFunctions.UndeployJobSpecFunction;
 import org.apache.hyracks.control.common.job.TaskAttemptDescriptor;
 import org.apache.hyracks.ipc.impl.IPCSystem;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class NodeControllerRemoteProxy extends ControllerRemoteProxy implements INodeController {
-    private static final Logger LOGGER = Logger.getLogger(NodeControllerRemoteProxy.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     public NodeControllerRemoteProxy(IPCSystem ipc, InetSocketAddress inetSocketAddress) {
         super(ipc, inetSocketAddress);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/utils/HyracksThreadFactory.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/utils/HyracksThreadFactory.java
index c9ef4d08..6c50b09 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/utils/HyracksThreadFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/utils/HyracksThreadFactory.java
@@ -21,14 +21,16 @@
 import java.lang.Thread.UncaughtExceptionHandler;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class HyracksThreadFactory implements ThreadFactory {
     private final String identifier;
     private final AtomicInteger threadId = new AtomicInteger();
 
-    private static final Logger LOGGER = Logger.getLogger(HyracksThreadFactory.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     public HyracksThreadFactory(String identifier) {
         this.identifier = identifier;
@@ -41,7 +43,7 @@
         t.setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
             @Override
             public void uncaughtException(Thread t, Throwable e) {
-                LOGGER.log(Level.SEVERE, "Uncaught exception by " + t.getName(), e);
+                LOGGER.log(Level.ERROR, "Uncaught exception by " + t.getName(), e);
             }
         });
         return t;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/AbstractWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/AbstractWork.java
index 076dd66..b7f3332 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/AbstractWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/AbstractWork.java
@@ -18,9 +18,10 @@
  */
 package org.apache.hyracks.control.common.work;
 
-import java.util.logging.Level;
+import org.apache.logging.log4j.Level;
 
 public abstract class AbstractWork implements Runnable {
+
     public Level logLevel() {
         return Level.INFO;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/SynchronizableWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/SynchronizableWork.java
index f9952db..ad64fd2 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/SynchronizableWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/SynchronizableWork.java
@@ -18,10 +18,14 @@
  */
 package org.apache.hyracks.control.common.work;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public abstract class SynchronizableWork extends AbstractWork {
+
+    private static final Logger LOGGER = LogManager.getLogger();
+
     private boolean done;
 
     private Exception e;
@@ -38,7 +42,7 @@
         try {
             doRun();
         } catch (Exception ex) {
-            Logger.getLogger(getClass().getName()).log(Level.INFO, "Exception thrown from work", ex);
+            LOGGER.log(Level.INFO, "Exception thrown from work", ex);
             this.e = ex;
         } finally {
             synchronized (this) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java
index f1b00ab..dbcba99 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java
@@ -23,13 +23,14 @@
 import java.lang.management.ThreadMXBean;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class WorkQueue {
-    private static final Logger LOGGER = Logger.getLogger(WorkQueue.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     //to be fixed when application vs. hyracks log level issues are sorted
     private static final boolean DEBUG = false;
 
@@ -80,10 +81,10 @@
 
     public void schedule(AbstractWork event) {
         if (DEBUG) {
-            LOGGER.log(Level.FINEST, "Enqueue (" + hashCode() + "): " + enqueueCount.incrementAndGet());
+            LOGGER.log(Level.DEBUG, "Enqueue (" + hashCode() + "): " + enqueueCount.incrementAndGet());
         }
-        if (LOGGER.isLoggable(Level.FINER)) {
-            LOGGER.finer("Scheduling: " + event);
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug("Scheduling: " + event);
         }
         queue.offer(event);
     }
@@ -115,18 +116,18 @@
                     break;
                 }
                 if (DEBUG) {
-                    LOGGER.log(Level.FINEST,
+                    LOGGER.log(Level.TRACE,
                             "Dequeue (" + WorkQueue.this.hashCode() + "): " + dequeueCount.incrementAndGet() + "/"
                                     + enqueueCount);
                 }
-                if (LOGGER.isLoggable(r.logLevel())) {
+                if (LOGGER.isEnabled(r.logLevel())) {
                     LOGGER.log(r.logLevel(), "Executing: " + r);
                 }
                 ThreadInfo before = threadMXBean.getThreadInfo(thread.getId());
                 try {
                     r.run();
                 } catch (Exception e) {
-                    LOGGER.log(Level.WARNING, "Exception while executing " + r, e);
+                    LOGGER.log(Level.WARN, "Exception while executing " + r, e);
                 } finally {
                     auditWaitsAndBlocks(r, before);
                 }
@@ -138,7 +139,7 @@
             final long waitedDelta = after.getWaitedCount() - before.getWaitedCount();
             final long blockedDelta = after.getBlockedCount() - before.getBlockedCount();
             if (waitedDelta > 0 || blockedDelta > 0) {
-                LOGGER.warning("Work " + r + " waited " + waitedDelta + " times (~"
+                LOGGER.warn("Work " + r + " waited " + waitedDelta + " times (~"
                         + (after.getWaitedTime() - before.getWaitedTime()) + "ms), blocked " + blockedDelta
                         + " times (~" + (after.getBlockedTime() - before.getBlockedTime()) + "ms)"
                 );
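For reference, the conversions above follow the usual java.util.logging to Log4j2 level mapping: SEVERE -> ERROR, WARNING -> WARN, INFO -> INFO, FINE -> DEBUG, FINER/FINEST -> TRACE. A minimal standalone sketch of the guarded, dynamic-level idiom WorkQueue now uses (class name and message are illustrative only, not part of this change):

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class GuardedLoggingSketch {
        // No-arg getLogger() names the logger after the calling class.
        private static final Logger LOGGER = LogManager.getLogger();

        public static void main(String[] args) {
            Level level = Level.DEBUG; // e.g. the value returned by AbstractWork.logLevel()
            if (LOGGER.isEnabled(level)) {
                // The guard keeps the string concatenation off the hot path when the level is disabled.
                LOGGER.log(level, "Executing: " + describe());
            }
        }

        private static String describe() {
            return "work-" + System.nanoTime();
        }
    }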
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
index b5e96e3..594b701 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
@@ -83,5 +83,13 @@
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-databind</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/BaseNCApplication.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/BaseNCApplication.java
index 4d8cbbd..94e86dd 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/BaseNCApplication.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/BaseNCApplication.java
@@ -20,8 +20,6 @@
 
 import java.lang.management.ManagementFactory;
 import java.util.Arrays;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.application.INCApplication;
 import org.apache.hyracks.api.application.IServiceContext;
@@ -33,6 +31,8 @@
 import org.apache.hyracks.control.common.controllers.ControllerConfig;
 import org.apache.hyracks.control.common.controllers.NCConfig;
 import org.apache.hyracks.control.nc.io.DefaultDeviceResolver;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.core.config.Configurator;
 
 public class BaseNCApplication implements INCApplication {
     public static final BaseNCApplication INSTANCE = new BaseNCApplication();
@@ -97,7 +97,7 @@
     }
 
     protected void configureLoggingLevel(Level level) {
-        Logger.getLogger("org.apache.hyracks").setLevel(level);
+        Configurator.setLevel("org.apache.hyracks", level);
     }
 
 }
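Configurator.setLevel comes from log4j-core, which is why hyracks-control-nc adds the log4j-core dependency above in addition to log4j-api. A minimal sketch of the programmatic level change now used by configureLoggingLevel, assuming log4j-core is on the classpath (the WARN level shown is only an example):

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;
    import org.apache.logging.log4j.core.config.Configurator;

    public class LoggingLevelSketch {
        private static final Logger LOGGER = LogManager.getLogger();

        public static void main(String[] args) {
            // Sets the configured level for the "org.apache.hyracks" logger subtree at runtime.
            Configurator.setLevel("org.apache.hyracks", Level.WARN);
            LOGGER.info("suppressed unless the effective level allows INFO");
        }
    }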
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
index ce666b0..8cb33ca 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
@@ -25,7 +25,6 @@
 import java.util.Hashtable;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.application.INCServiceContext;
 import org.apache.hyracks.api.comm.IPartitionCollector;
@@ -60,9 +59,11 @@
 import org.apache.hyracks.control.nc.io.WorkspaceFileFactory;
 import org.apache.hyracks.control.nc.resources.DefaultDeallocatableRegistry;
 import org.apache.hyracks.control.nc.resources.memory.FrameManager;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class Joblet implements IHyracksJobletContext, ICounterContext {
-    private static final Logger LOGGER = Logger.getLogger(Joblet.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final NodeControllerService nodeController;
 
@@ -221,7 +222,7 @@
     public void close() {
         long stillAllocated = memoryAllocation.get();
         if (stillAllocated > 0) {
-            LOGGER.warning("Freeing leaked " + stillAllocated + " bytes");
+            LOGGER.warn("Freeing leaked " + stillAllocated + " bytes");
             serviceCtx.getMemoryManager().deallocate(stillAllocated);
         }
         nodeController.getExecutor().execute(new Runnable() {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
index 11df079..ec8cf27 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
@@ -20,18 +20,19 @@
 
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.application.INCApplication;
 import org.apache.hyracks.control.common.config.ConfigManager;
 import org.apache.hyracks.control.common.config.ConfigUtils;
 import org.apache.hyracks.control.common.controllers.NCConfig;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.kohsuke.args4j.CmdLineException;
 
 @SuppressWarnings("InfiniteLoopStatement")
 public class NCDriver {
-    private static final Logger LOGGER = Logger.getLogger(NCDriver.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private NCDriver() {
     }
@@ -49,10 +50,10 @@
                 Thread.sleep(10000);
             }
         } catch (CmdLineException e) {
-            LOGGER.log(Level.FINE, "Exception parsing command line: " + Arrays.toString(args), e);
+            LOGGER.log(Level.DEBUG, "Exception parsing command line: " + Arrays.toString(args), e);
             System.exit(2);
         } catch (Exception e) {
-            LOGGER.log(Level.SEVERE, "Exiting NCDriver due to exception", e);
+            LOGGER.log(Level.DEBUG, "Exiting NCDriver due to exception", e);
             System.exit(1);
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCShutdownHook.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCShutdownHook.java
index 6308373..020e564 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCShutdownHook.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCShutdownHook.java
@@ -18,10 +18,10 @@
  */
 package org.apache.hyracks.control.nc;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.util.ThreadDumpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 /**
  * Shutdown hook that invokes {@link NodeControllerService#stop() stop} method.
@@ -32,7 +32,7 @@
 
     public static final int FAILED_TO_STARTUP_EXIT_CODE = 2;
     public static final int FAILED_TO_RECOVER_EXIT_CODE = 3;
-    private static final Logger LOGGER = Logger.getLogger(NCShutdownHook.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private static final long SHUTDOWN_WAIT_TIME = 10 * 60 * 1000L;
     private final Thread watchDog;
     private final NodeControllerService nodeControllerService;
@@ -73,7 +73,7 @@
             LOGGER.log(Level.INFO, () -> "Thread dump at shutdown: " + ThreadDumpUtil.takeDumpString());
             nodeControllerService.stop();
         } catch (Throwable th) { // NOSONAR... This is fine since this is shutdown hook
-            LOGGER.log(Level.WARNING, "Exception in executing shutdown hook", th);
+            LOGGER.log(Level.WARN, "Exception in executing shutdown hook", th);
         }
     }
 }
\ No newline at end of file
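The lambda retained in the INFO call above now binds to Log4j2's Supplier overload of log(), so the thread dump string is only built when INFO is enabled. A minimal sketch of the lazy-message idiom (the expensive() helper is illustrative only):

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class LazyMessageSketch {
        private static final Logger LOGGER = LogManager.getLogger();

        public static void main(String[] args) {
            // The lambda runs only if DEBUG is enabled for this logger.
            LOGGER.log(Level.DEBUG, () -> "state: " + expensive());
            // Supplier-valued parameters are deferred the same way.
            LOGGER.debug("state: {}", () -> expensive());
        }

        private static String expensive() {
            return String.valueOf(System.nanoTime());
        }
    }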
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
index a3a9ac5..01e34c1 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
@@ -40,8 +40,6 @@
 import java.util.concurrent.Semaphore;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.commons.lang3.mutable.MutableObject;
@@ -95,10 +93,13 @@
 import org.apache.hyracks.util.PidHelper;
 import org.apache.hyracks.util.trace.ITracer;
 import org.apache.hyracks.util.trace.Tracer;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.kohsuke.args4j.CmdLineException;
 
 public class NodeControllerService implements IControllerService {
-    private static final Logger LOGGER = Logger.getLogger(NodeControllerService.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private static final double MEMORY_FUDGE_FACTOR = 0.8;
     private static final long ONE_SECOND_NANOS = TimeUnit.SECONDS.toNanos(1);
@@ -196,7 +197,7 @@
             throw new HyracksException("id not set");
         }
         lccm = new LifeCycleComponentManager();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Setting uncaught exception handler " + getLifeCycleComponentManager());
         }
         // Set shutdown hook before so it doesn't have the same uncaught exception handler
@@ -305,7 +306,7 @@
                         try {
                             registerNode();
                         } catch (Exception e) {
-                            LOGGER.log(Level.WARNING, "Failed Registering with cc", e);
+                            LOGGER.log(Level.WARN, "Failed Registering with cc", e);
                             throw new IPCException(e);
                         }
                     }
@@ -361,7 +362,7 @@
             }
         }
         if (registrationException != null) {
-            LOGGER.log(Level.WARNING, "Registering with Cluster Controller failed with exception",
+            LOGGER.log(Level.WARN, "Registering with Cluster Controller failed with exception",
                     registrationException);
             throw registrationException;
         }
@@ -390,7 +391,7 @@
             application.preStop();
             executor.shutdownNow();
             if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
-                LOGGER.log(Level.SEVERE, "Some jobs failed to exit, continuing with abnormal shutdown");
+                LOGGER.log(Level.ERROR, "Some jobs failed to exit, continuing with abnormal shutdown");
             }
             partitionManager.close();
             datasetPartitionManager.close();
@@ -412,13 +413,13 @@
             try {
                 ccs.notifyShutdown(id);
             } catch (Exception e) {
-                LOGGER.log(Level.WARNING, "Exception notifying CC of shutdown", e);
+                LOGGER.log(Level.WARN, "Exception notifying CC of shutdown", e);
             }
             ipc.stop();
 
             LOGGER.log(Level.INFO, "Stopped NodeControllerService");
         } else {
-            LOGGER.log(Level.SEVERE, "Duplicate shutdown call; original: " + Arrays.toString(shutdownCallStack),
+            LOGGER.log(Level.ERROR, "Duplicate shutdown call; original: " + Arrays.toString(shutdownCallStack),
                     new Exception("Duplicate shutdown call"));
         }
     }
@@ -540,7 +541,7 @@
             if (delayNanos > 0) {
                 delayBlock.tryAcquire(delayNanos, TimeUnit.NANOSECONDS); //NOSONAR - ignore result of tryAcquire
             } else {
-                LOGGER.warning("After sending heartbeat, next one is already late by "
+                LOGGER.warn("After sending heartbeat, next one is already late by "
                         + TimeUnit.NANOSECONDS.toMillis(-delayNanos) + "ms; sending without delay");
             }
         }
@@ -591,15 +592,15 @@
 
             try {
                 cc.nodeHeartbeat(id, hbData);
-                LOGGER.log(Level.FINE, "Successfully sent heartbeat");
+                LOGGER.log(Level.DEBUG, "Successfully sent heartbeat");
                 return true;
             } catch (InterruptedException e) {
                 throw e;
             } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.FINE)) {
-                    LOGGER.log(Level.FINE, "Exception sending heartbeat; will retry after 1s", e);
+                if (LOGGER.isDebugEnabled()) {
+                    LOGGER.log(Level.DEBUG, "Exception sending heartbeat; will retry after 1s", e);
                 } else {
-                    LOGGER.log(Level.SEVERE, "Exception sending heartbeat; will retry after 1s: " + e.toString());
+                    LOGGER.log(Level.ERROR, "Exception sending heartbeat; will retry after 1s: " + e.toString());
                 }
                 return false;
             }
@@ -624,7 +625,7 @@
                     cc.reportProfile(id, profiles);
                 }
             } catch (Exception e) {
-                LOGGER.log(Level.WARNING, "Exception reporting profile", e);
+                LOGGER.log(Level.WARN, "Exception reporting profile", e);
             }
         }
     }
@@ -644,7 +645,7 @@
             try {
                 tracer.instant("CurrentTime", traceCategory, Tracer.Scope.p, Tracer.dateTimeStamp());
             } catch (Exception e) {
-                LOGGER.log(Level.WARNING, "Exception tracing current time", e);
+                LOGGER.log(Level.WARN, "Exception tracing current time", e);
             }
         }
     }
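The calls in NodeControllerService keep their existing string concatenation (guarded where the message is expensive); Log4j2's {} parameterized messages, used later in StartTasksWork, are an equivalent way to defer formatting until the level check passes. A minimal sketch with an illustrative value:

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class ParameterizedLoggingSketch {
        private static final Logger LOGGER = LogManager.getLogger();

        public static void main(String[] args) {
            long lateByMillis = 42; // illustrative value only
            // The message is only formatted if WARN is enabled, so no explicit guard is needed.
            LOGGER.warn("After sending heartbeat, next one is already late by {}ms; sending without delay",
                    lateByMillis);
        }
    }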
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
index 94ee92b..34ddd6a 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
@@ -31,8 +31,6 @@
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Semaphore;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameReader;
 import org.apache.hyracks.api.comm.IFrameWriter;
@@ -69,9 +67,12 @@
 import org.apache.hyracks.control.nc.resources.DefaultDeallocatableRegistry;
 import org.apache.hyracks.control.nc.work.NotifyTaskCompleteWork;
 import org.apache.hyracks.control.nc.work.NotifyTaskFailureWork;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class Task implements IHyracksTaskContext, ICounterContext, Runnable {
-    private static final Logger LOGGER = Logger.getLogger(Task.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final Joblet joblet;
 
@@ -355,9 +356,9 @@
             removePendingThread(ct);
         }
         if (!exceptions.isEmpty()) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
+            if (LOGGER.isWarnEnabled()) {
                 for (int i = 0; i < exceptions.size(); i++) {
-                    LOGGER.log(Level.WARNING,
+                    LOGGER.log(Level.WARN,
                             "Task " + taskAttemptId + " failed with exception"
                                     + (exceptions.size() > 1 ? "s (" + (i + 1) + "/" + exceptions.size() + ")" : ""),
                             exceptions.get(i));
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionManager.java
index 962d541..bc980e1 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionManager.java
@@ -22,7 +22,6 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.Executor;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -36,9 +35,11 @@
 import org.apache.hyracks.control.nc.NodeControllerService;
 import org.apache.hyracks.control.nc.io.WorkspaceFileFactory;
 import org.apache.hyracks.control.nc.resources.DefaultDeallocatableRegistry;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class DatasetPartitionManager implements IDatasetPartitionManager {
-    private static final Logger LOGGER = Logger.getLogger(DatasetPartitionManager.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final NodeControllerService ncs;
 
@@ -82,7 +83,7 @@
             resultStates[partition] = dpw.getResultState();
         }
 
-        LOGGER.fine("Initialized partition writer: JobId: " + jobId + ":partition: " + partition);
+        LOGGER.debug("Initialized partition writer: JobId: " + jobId + ":partition: " + partition);
         return dpw;
     }
 
@@ -101,7 +102,7 @@
     @Override
     public void reportPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition) throws HyracksException {
         try {
-            LOGGER.fine("Reporting partition write completion: JobId: " + jobId + ": ResultSetId: " + rsId
+            LOGGER.debug("Reporting partition write completion: JobId: " + jobId + ": ResultSetId: " + rsId
                     + ":partition: " + partition);
             ncs.getClusterController().reportResultPartitionWriteCompletion(jobId, rsId, partition);
         } catch (Exception e) {
@@ -115,7 +116,7 @@
         ResultState resultState = getResultState(jobId, resultSetId, partition);
         DatasetPartitionReader dpr = new DatasetPartitionReader(this, datasetMemoryManager, executor, resultState);
         dpr.writeTo(writer);
-        LOGGER.fine("Initialized partition reader: JobId: " + jobId + ":ResultSetId: " + resultSetId + ":partition: "
+        LOGGER.debug("Initialized partition reader: JobId: " + jobId + ":ResultSetId: " + resultSetId + ":partition: "
                 + partition);
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java
index 732ee68..ec33b05 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java
@@ -20,15 +20,15 @@
 
 import java.nio.ByteBuffer;
 import java.util.concurrent.Executor;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.comm.channels.NetworkOutputChannel;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class DatasetPartitionReader {
-    private static final Logger LOGGER = Logger.getLogger(DatasetPartitionReader.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final DatasetPartitionManager datasetPartitionManager;
 
@@ -85,7 +85,7 @@
                 } catch (HyracksDataException e) {
                     throw new RuntimeException(e);
                 }
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info("result reading successful(" + resultState.getResultSetPartitionId() + ")");
                 }
             }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionWriter.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionWriter.java
index b654d44..2bf5326 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionWriter.java
@@ -19,8 +19,6 @@
 package org.apache.hyracks.control.nc.dataset;
 
 import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -31,9 +29,11 @@
 import org.apache.hyracks.api.io.IWorkspaceFileFactory;
 import org.apache.hyracks.api.job.JobId;
 import org.apache.hyracks.api.partitions.ResultSetPartitionId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class DatasetPartitionWriter implements IFrameWriter {
-    private static final Logger LOGGER = Logger.getLogger(DatasetPartitionWriter.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final IDatasetPartitionManager manager;
 
@@ -79,7 +79,7 @@
 
     @Override
     public void open() {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("open(" + partition + ")");
         }
         partitionRegistered = false;
@@ -105,7 +105,7 @@
 
     @Override
     public void close() throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("close(" + partition + ")");
         }
         try {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultSetMap.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultSetMap.java
index 579f68b..3957401 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultSetMap.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultSetMap.java
@@ -21,16 +21,17 @@
 import java.io.Serializable;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataset.IDatasetStateRecord;
 import org.apache.hyracks.api.dataset.ResultSetId;
 import org.apache.hyracks.api.job.JobId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 class ResultSetMap implements IDatasetStateRecord, Serializable {
     private static final long serialVersionUID = 1L;
 
-    private static final Logger LOGGER = Logger.getLogger(DatasetPartitionManager.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final long timestamp;
     private final HashMap<ResultSetId, ResultState[]> resultStateMap;
@@ -70,7 +71,7 @@
             final ResultState state = resultStates[partition];
             if (state != null) {
                 state.closeAndDelete();
-                LOGGER.fine("Removing partition: " + partition + " for JobId: " + jobId);
+                LOGGER.debug("Removing partition: " + partition + " for JobId: " + jobId);
             }
             resultStates[partition] = null;
             boolean stateEmpty = true;
@@ -95,7 +96,7 @@
     void closeAndDeleteAll() {
         applyToAllStates((rsId, state, i) -> {
             state.closeAndDelete();
-            LOGGER.fine("Removing partition: " + i + " for result set " + rsId);
+            LOGGER.debug("Removing partition: " + i + " for result set " + rsId);
         });
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/DatasetNetworkManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/DatasetNetworkManager.java
index 0b74806..5eba281 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/DatasetNetworkManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/DatasetNetworkManager.java
@@ -22,8 +22,6 @@
 import java.net.InetSocketAddress;
 import java.net.SocketAddress;
 import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IChannelInterfaceFactory;
 import org.apache.hyracks.api.comm.ICloseableBufferAcceptor;
@@ -40,9 +38,11 @@
 import org.apache.hyracks.net.protocols.muxdemux.MultiplexedConnection;
 import org.apache.hyracks.net.protocols.muxdemux.MuxDemux;
 import org.apache.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class DatasetNetworkManager implements IChannelConnectionFactory {
-    private static final Logger LOGGER = Logger.getLogger(DatasetNetworkManager.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private static final int MAX_CONNECTION_ATTEMPTS = 5;
 
@@ -137,8 +137,8 @@
             JobId jobId = new JobId(buffer.getLong());
             ResultSetId rsId = new ResultSetId(buffer.getLong());
             int partition = buffer.getInt();
-            if (LOGGER.isLoggable(Level.FINE)) {
-                LOGGER.fine("Received initial dataset partition read request for JobId: " + jobId + " partition: "
+            if (LOGGER.isDebugEnabled()) {
+                LOGGER.debug("Received initial dataset partition read request for JobId: " + jobId + " partition: "
                         + partition + " on channel: " + ccb);
             }
             noc = new NetworkOutputChannel(ccb, nBuffers);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/MessagingNetworkManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/MessagingNetworkManager.java
index 7983b93..a37d131 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/MessagingNetworkManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/MessagingNetworkManager.java
@@ -25,8 +25,6 @@
 import java.nio.ByteBuffer;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.client.NodeControllerInfo;
 import org.apache.hyracks.api.comm.IChannelControlBlock;
@@ -40,10 +38,12 @@
 import org.apache.hyracks.net.protocols.muxdemux.MultiplexedConnection;
 import org.apache.hyracks.net.protocols.muxdemux.MuxDemux;
 import org.apache.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class MessagingNetworkManager {
 
-    private static final Logger LOGGER = Logger.getLogger(MessagingNetworkManager.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private static final int MAX_CONNECTION_ATTEMPTS = 5;
     private final MuxDemux md;
     private NetworkAddress localNetworkAddress;
@@ -193,8 +193,8 @@
         @Override
         public void accept(ByteBuffer buffer) {
             String nodeId = readMessagingInitialMessage(buffer);
-            if (LOGGER.isLoggable(Level.FINE)) {
-                LOGGER.fine("Opened messaging channel with node: " + nodeId);
+            if (LOGGER.isDebugEnabled()) {
+                LOGGER.debug("Opened messaging channel with node: " + nodeId);
             }
             // Return the channel's original acceptor
             ICloseableBufferAcceptor originalAcceptor;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/NetworkManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/NetworkManager.java
index 325966a..f3276a4 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/NetworkManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/NetworkManager.java
@@ -22,8 +22,6 @@
 import java.net.InetSocketAddress;
 import java.net.SocketAddress;
 import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IChannelInterfaceFactory;
 import org.apache.hyracks.api.comm.ICloseableBufferAcceptor;
@@ -41,9 +39,11 @@
 import org.apache.hyracks.net.protocols.muxdemux.MultiplexedConnection;
 import org.apache.hyracks.net.protocols.muxdemux.MuxDemux;
 import org.apache.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class NetworkManager implements IChannelConnectionFactory {
-    private static final Logger LOGGER = Logger.getLogger(NetworkManager.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private static final int MAX_CONNECTION_ATTEMPTS = 5;
 
@@ -125,8 +125,8 @@
         @Override
         public void accept(ByteBuffer buffer) {
             PartitionId pid = readInitialMessage(buffer);
-            if (LOGGER.isLoggable(Level.FINE)) {
-                LOGGER.fine("Received initial partition request: " + pid + " on channel: " + ccb);
+            if (LOGGER.isDebugEnabled()) {
+                LOGGER.debug("Received initial partition request: " + pid + " on channel: " + ccb);
             }
             noc = new NetworkOutputChannel(ccb, nBuffers);
             try {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
index 3b52dc9..b9d2f4d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
@@ -20,8 +20,6 @@
 
 import java.nio.ByteBuffer;
 import java.util.concurrent.Executor;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -32,9 +30,11 @@
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.api.partitions.PartitionId;
 import org.apache.hyracks.control.common.job.PartitionState;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class MaterializedPartitionWriter implements IFrameWriter {
-    private static final Logger LOGGER = Logger.getLogger(MaterializedPartitionWriter.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final IHyracksTaskContext ctx;
 
@@ -65,7 +65,7 @@
 
     @Override
     public void open() throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("open(" + pid + " by " + taId);
         }
         failed = false;
@@ -89,7 +89,7 @@
 
     @Override
     public void close() throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("close(" + pid + " by " + taId);
         }
         if (handle != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
index 3582da2..57eba53 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
@@ -20,8 +20,6 @@
 
 import java.nio.ByteBuffer;
 import java.util.concurrent.Executor;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -33,9 +31,12 @@
 import org.apache.hyracks.api.partitions.IPartition;
 import org.apache.hyracks.api.partitions.PartitionId;
 import org.apache.hyracks.control.common.job.PartitionState;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class MaterializingPipelinedPartition implements IFrameWriter, IPartition {
-    private static final Logger LOGGER = Logger.getLogger(MaterializingPipelinedPartition.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final IHyracksTaskContext ctx;
     private final Executor executor;
@@ -50,7 +51,7 @@
     private boolean failed;
     protected boolean flushRequest;
     private boolean deallocated;
-    private Level openCloseLevel = Level.FINE;
+    private Level openCloseLevel = Level.DEBUG;
     private Thread dataConsumerThread;
 
     public MaterializingPipelinedPartition(IHyracksTaskContext ctx, PartitionManager manager, PartitionId pid,
@@ -164,7 +165,7 @@
                         }
                     }
                 } catch (Exception e) {
-                    LOGGER.log(Level.SEVERE, e.getMessage(), e);
+                    LOGGER.log(Level.ERROR, e.getMessage(), e);
                 } finally {
                     thread.setName(oldName);
                     setDataConsumerThread(null); // Sets back the data consumer thread to null.
@@ -180,7 +181,7 @@
 
     @Override
     public void open() throws HyracksDataException {
-        if (LOGGER.isLoggable(openCloseLevel)) {
+        if (LOGGER.isEnabled(openCloseLevel)) {
             LOGGER.log(openCloseLevel, "open(" + pid + " by " + taId);
         }
         size = 0;
@@ -213,7 +214,7 @@
 
     @Override
     public void close() throws HyracksDataException {
-        if (LOGGER.isLoggable(openCloseLevel)) {
+        if (LOGGER.isEnabled(openCloseLevel)) {
             LOGGER.log(openCloseLevel, "close(" + pid + " by " + taId);
         }
         if (writeHandle != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/task/ThreadDumpTask.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/task/ThreadDumpTask.java
index e23aaaa..f43dcbc 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/task/ThreadDumpTask.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/task/ThreadDumpTask.java
@@ -18,14 +18,14 @@
  */
 package org.apache.hyracks.control.nc.task;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.util.ThreadDumpUtil;
 import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ThreadDumpTask implements Runnable {
-    private static final Logger LOGGER = Logger.getLogger(ThreadDumpTask.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final NodeControllerService ncs;
     private final String requestId;
 
@@ -40,14 +40,14 @@
         try {
             result = ThreadDumpUtil.takeDumpJSONString();
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Exception taking thread dump", e);
+            LOGGER.log(Level.WARN, "Exception taking thread dump", e);
             result = null;
         }
         try {
             ncs.getClusterController().notifyThreadDump(
                     ncs.getContext().getNodeId(), requestId, result);
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Exception sending thread dump to CC", e);
+            LOGGER.log(Level.WARN, "Exception sending thread dump to CC", e);
         }
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortAllJobsWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortAllJobsWork.java
index 56100da..6132639 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortAllJobsWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortAllJobsWork.java
@@ -19,8 +19,6 @@
 package org.apache.hyracks.control.nc.work;
 
 import java.util.Collection;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataset.IDatasetPartitionManager;
 import org.apache.hyracks.api.job.JobStatus;
@@ -28,10 +26,13 @@
 import org.apache.hyracks.control.nc.Joblet;
 import org.apache.hyracks.control.nc.NodeControllerService;
 import org.apache.hyracks.control.nc.Task;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class AbortAllJobsWork extends SynchronizableWork {
 
-    private static final Logger LOGGER = Logger.getLogger(AbortAllJobsWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final NodeControllerService ncs;
 
     public AbortAllJobsWork(NodeControllerService ncs) {
@@ -40,14 +41,14 @@
 
     @Override
     protected void doRun() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Aborting all tasks");
         }
         IDatasetPartitionManager dpm = ncs.getDatasetPartitionManager();
         if (dpm != null) {
             ncs.getDatasetPartitionManager().abortAllReaders();
         } else {
-            LOGGER.log(Level.WARNING, "DatasetPartitionManager is null on " + ncs.getId());
+            LOGGER.log(Level.WARN, "DatasetPartitionManager is null on " + ncs.getId());
         }
         Collection<Joblet> joblets = ncs.getJobletMap().values();
         for (Joblet ji : joblets) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortTasksWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortTasksWork.java
index 5870e76..80f3e98 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortTasksWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortTasksWork.java
@@ -20,8 +20,6 @@
 
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.TaskAttemptId;
 import org.apache.hyracks.api.dataset.IDatasetPartitionManager;
@@ -30,9 +28,12 @@
 import org.apache.hyracks.control.nc.Joblet;
 import org.apache.hyracks.control.nc.NodeControllerService;
 import org.apache.hyracks.control.nc.Task;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class AbortTasksWork extends AbstractWork {
-    private static final Logger LOGGER = Logger.getLogger(AbortTasksWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final NodeControllerService ncs;
 
@@ -48,7 +49,7 @@
 
     @Override
     public void run() {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Aborting Tasks: " + jobId + ":" + tasks);
         }
         IDatasetPartitionManager dpm = ncs.getDatasetPartitionManager();
@@ -65,7 +66,7 @@
                 }
             }
         } else {
-            LOGGER.log(Level.WARNING,
+            LOGGER.log(Level.WARN,
                     "Joblet couldn't be found. Tasks of job " + jobId + " have all either completed or failed");
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ApplicationMessageWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ApplicationMessageWork.java
index 4f5b556..33d1d60 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ApplicationMessageWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ApplicationMessageWork.java
@@ -18,18 +18,18 @@
  */
 package org.apache.hyracks.control.nc.work;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.api.deployment.DeploymentId;
 import org.apache.hyracks.api.messages.IMessage;
 import org.apache.hyracks.control.common.deployment.DeploymentUtils;
 import org.apache.hyracks.control.common.work.AbstractWork;
 import org.apache.hyracks.control.nc.NodeControllerService;
 import org.apache.hyracks.control.nc.application.NCServiceContext;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ApplicationMessageWork extends AbstractWork {
-    private static final Logger LOGGER = Logger.getLogger(ApplicationMessageWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private byte[] message;
     private DeploymentId deploymentId;
     private String nodeId;
@@ -50,10 +50,10 @@
             if (ctx.getMessageBroker() != null) {
                 ctx.getMessageBroker().receivedMessage(data, nodeId);
             } else {
-                LOGGER.log(Level.WARNING, "Message was sent, but no Message Broker set!");
+                LOGGER.log(Level.WARN, "Message was sent, but no Message Broker set!");
             }
         } catch (Exception e) {
-            Logger.getLogger(this.getClass().getName()).log(Level.WARNING, "Error in application message delivery!", e);
+            LOGGER.warn("Error in application message delivery!", e);
             throw new RuntimeException(e);
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/CleanupJobletWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/CleanupJobletWork.java
index 03ae90c..d38cd5e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/CleanupJobletWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/CleanupJobletWork.java
@@ -21,8 +21,6 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.job.JobId;
 import org.apache.hyracks.api.job.JobStatus;
@@ -30,9 +28,12 @@
 import org.apache.hyracks.control.common.work.AbstractWork;
 import org.apache.hyracks.control.nc.Joblet;
 import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class CleanupJobletWork extends AbstractWork {
-    private static final Logger LOGGER = Logger.getLogger(CleanupJobletWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final NodeControllerService ncs;
 
@@ -48,7 +49,7 @@
 
     @Override
     public void run() {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Cleaning up after job: " + jobId);
         }
         ncs.removeJobParameterByteStore(jobId);
@@ -62,8 +63,8 @@
                         // Put deallocate in a try block to make sure that every IPartition is de-allocated.
                         p.deallocate();
                     } catch (Exception e) {
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.log(Level.WARNING, e.getMessage(), e);
+                        if (LOGGER.isWarnEnabled()) {
+                            LOGGER.log(Level.WARN, e.getMessage(), e);
                         }
                     }
                 }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskCompleteWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskCompleteWork.java
index 675926e..449d9a3 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskCompleteWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskCompleteWork.java
@@ -18,16 +18,16 @@
  */
 package org.apache.hyracks.control.nc.work;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.control.common.job.profiling.om.TaskProfile;
 import org.apache.hyracks.control.common.work.AbstractWork;
 import org.apache.hyracks.control.nc.NodeControllerService;
 import org.apache.hyracks.control.nc.Task;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class NotifyTaskCompleteWork extends AbstractWork {
-    private static final Logger LOGGER = Logger.getLogger(NotifyTaskCompleteWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final NodeControllerService ncs;
     private final Task task;
 
@@ -44,7 +44,7 @@
             ncs.getClusterController().notifyTaskComplete(task.getJobletContext().getJobId(), task.getTaskAttemptId(),
                     ncs.getId(), taskProfile);
         } catch (Exception e) {
-            LOGGER.log(Level.SEVERE, "Failed notifying task complete for " + task.getTaskAttemptId(), e);
+            LOGGER.log(Level.ERROR, "Failed notifying task complete for " + task.getTaskAttemptId(), e);
         }
         task.getJoblet().removeTask(task);
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskFailureWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskFailureWork.java
index 7ed2c09..1d6ae1b 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskFailureWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskFailureWork.java
@@ -19,8 +19,6 @@
 package org.apache.hyracks.control.nc.work;
 
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.TaskAttemptId;
 import org.apache.hyracks.api.dataset.IDatasetPartitionManager;
@@ -28,9 +26,12 @@
 import org.apache.hyracks.control.common.work.AbstractWork;
 import org.apache.hyracks.control.nc.NodeControllerService;
 import org.apache.hyracks.control.nc.Task;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class NotifyTaskFailureWork extends AbstractWork {
-    private static final Logger LOGGER = Logger.getLogger(NotifyTaskFailureWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final NodeControllerService ncs;
     private final Task task;
     private final JobId jobId;
@@ -48,7 +49,7 @@
 
     @Override
     public void run() {
-        LOGGER.log(Level.WARNING, ncs.getId() + " is sending a notification to cc that task " + taskId + " has failed",
+        LOGGER.log(Level.WARN, ncs.getId() + " is sending a notification to cc that task " + taskId + " has failed",
                 exceptions.get(0));
         try {
             IDatasetPartitionManager dpm = ncs.getDatasetPartitionManager();
@@ -57,7 +58,7 @@
             }
             ncs.getClusterController().notifyTaskFailure(jobId, taskId, ncs.getId(), exceptions);
         } catch (Exception e) {
-            LOGGER.log(Level.SEVERE, "Failure reporting task failure to cluster controller", e);
+            LOGGER.log(Level.ERROR, "Failure reporting task failure to cluster controller", e);
         }
         if (task != null) {
             task.getJoblet().removeTask(task);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/StartTasksWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/StartTasksWork.java
index a2fcc25..f818c0d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/StartTasksWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/StartTasksWork.java
@@ -25,8 +25,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.application.INCServiceContext;
 import org.apache.hyracks.api.comm.IFrameWriter;
@@ -70,9 +68,12 @@
 import org.apache.hyracks.control.nc.partitions.PipelinedPartition;
 import org.apache.hyracks.control.nc.partitions.ReceiveSideMaterializingCollector;
 import org.apache.hyracks.control.nc.profiling.ProfilingPartitionWriterFactory;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class StartTasksWork extends AbstractWork {
-    private static final Logger LOGGER = Logger.getLogger(StartTasksWork.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final NodeControllerService ncs;
 
@@ -138,7 +139,7 @@
                 ActivityId aid = tid.getActivityId();
                 ActivityCluster ac = acg.getActivityMap().get(aid);
                 IActivity han = ac.getActivityMap().get(aid);
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info("Initializing " + taId + " -> " + han);
                 }
                 final int partition = tid.getPartition();
@@ -152,9 +153,7 @@
                     for (int i = 0; i < inputs.size(); ++i) {
                         IConnectorDescriptor conn = inputs.get(i);
                         IConnectorPolicy cPolicy = connectorPoliciesMap.get(conn.getConnectorId());
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("input: " + i + ": " + conn.getConnectorId());
-                        }
+                        LOGGER.info("input: {}: {}", i, conn.getConnectorId());
                         RecordDescriptor recordDesc = ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                         IPartitionCollector collector =
                                 createPartitionCollector(td, partition, task, i, conn, recordDesc, cPolicy);
@@ -171,10 +170,7 @@
 
                         IPartitionWriterFactory pwFactory =
                                 createPartitionWriterFactory(task, cPolicy, jobId, conn, partition, taId, flags);
-
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("output: " + i + ": " + conn.getConnectorId());
-                        }
+                        LOGGER.info("output: {}: {}", i, conn.getConnectorId());
                         IFrameWriter writer = conn.createPartitioner(task, recordDesc, pwFactory, partition,
                                 td.getPartitionCount(), td.getOutputPartitionCounts()[i]);
                         writer = enforce ? EnforceFrameWriter.enforce(writer) : writer;
@@ -188,7 +184,7 @@
                 taskIndex++;
             }
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Failure starting a task", e);
+            LOGGER.log(Level.WARN, "Failure starting a task", e);
             // notify cc of start task failure
             List<Exception> exceptions = new ArrayList<>();
             exceptions.add(e);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
index c4ade2c..4473e5e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
@@ -66,6 +66,10 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCService.java
index c0e5678..b6d0b70 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCService.java
@@ -34,8 +34,6 @@
 import java.util.Date;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.SystemUtils;
 import org.apache.hyracks.control.common.config.ConfigUtils;
@@ -43,6 +41,9 @@
 import org.apache.hyracks.api.config.Section;
 import org.apache.hyracks.control.common.controllers.ServiceConstants;
 import org.apache.hyracks.control.common.controllers.ServiceConstants.ServiceCommand;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.ini4j.Ini;
 import org.kohsuke.args4j.CmdLineParser;
 
@@ -52,7 +53,7 @@
  */
 public class NCService {
 
-    private static final Logger LOGGER = Logger.getLogger(NCService.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     /**
      * The .ini read from the CC (*not* the ncservice.ini file)
@@ -159,7 +160,7 @@
             // QQQ inheriting probably isn't right
             pb.inheritIO();
 
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Launching NCDriver process");
             }
 
@@ -178,7 +179,7 @@
                     writer.write("---------------------\n");
                 }
                 pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logfile));
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info("Logging to " + logfile.getCanonicalPath());
                 }
             }
@@ -200,13 +201,13 @@
             }
             return retval == 0;
         } catch (Exception e) {
-            if (LOGGER.isLoggable(Level.SEVERE)) {
+            if (LOGGER.isErrorEnabled()) {
                 StringWriter sw = new StringWriter();
                 try {
                     ini.store(sw);
-                    LOGGER.log(Level.SEVERE, "Configuration from CC broken: \n" + sw.toString(), e);
+                    LOGGER.log(Level.ERROR, "Configuration from CC broken: \n" + sw.toString(), e);
                 } catch (IOException e1) {
-                    LOGGER.log(Level.SEVERE, "Configuration from CC broken, failed to serialize", e1);
+                    LOGGER.log(Level.ERROR, "Configuration from CC broken, failed to serialize", e1);
                 }
             }
             return false;
@@ -225,7 +226,7 @@
             ObjectInputStream ois = new ObjectInputStream(is);
             String magic = ois.readUTF();
             if (!ServiceConstants.NC_SERVICE_MAGIC_COOKIE.equals(magic)) {
-                LOGGER.severe("Connection used incorrect magic cookie");
+                LOGGER.error("Connection used incorrect magic cookie");
                 return false;
             }
             switch (ServiceCommand.valueOf(ois.readUTF())) {
@@ -241,7 +242,7 @@
                     break;
             }
         } catch (Exception e) {
-            LOGGER.log(Level.SEVERE, "Error decoding connection from server", e);
+            LOGGER.log(Level.ERROR, "Error decoding connection from server", e);
         }
         return false;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml
index f44ed74..5088719 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml
@@ -79,6 +79,10 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameDeserializer.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameDeserializer.java
index 819d751..5a39523 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameDeserializer.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameDeserializer.java
@@ -21,16 +21,16 @@
 import java.io.DataInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.FrameConstants;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class FrameDeserializer {
-    private static final Logger LOGGER = Logger.getLogger(FrameDeserializer.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final ByteBufferInputStream bbis;
 
@@ -70,8 +70,8 @@
         Object[] record = new Object[recordDescriptor.getFieldCount()];
         for (int i = 0; i < record.length; ++i) {
             Object instance = recordDescriptor.getFields()[i].deserialize(di);
-            if (LOGGER.isLoggable(Level.FINEST)) {
-                LOGGER.finest(i + " " + instance);
+            if (LOGGER.isTraceEnabled()) {
+                LOGGER.trace(i + " " + instance);
             }
             record[i] = instance;
             if (FrameConstants.DEBUG_FRAME_IO) {
@@ -84,8 +84,8 @@
                 }
             }
         }
-        if (LOGGER.isLoggable(Level.FINEST)) {
-            LOGGER.finest("Read Record tIndex = " + tIndex + ", tupleCount = " + tupleCount);
+        if (LOGGER.isTraceEnabled()) {
+            LOGGER.trace("Read Record tIndex = " + tIndex + ", tupleCount = " + tupleCount);
         }
         ++tIndex;
         return record;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameOutputStream.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameOutputStream.java
index e8f826f..d3af00d 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameOutputStream.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameOutputStream.java
@@ -18,16 +18,15 @@
  */
 package org.apache.hyracks.dataflow.common.comm.io;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class FrameOutputStream extends ByteArrayAccessibleOutputStream {
-    private static final Logger LOGGER = Logger.getLogger(FrameOutputStream.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final FrameTupleAppender frameTupleAppender;
 
@@ -42,15 +41,15 @@
 
     public int getTupleCount() {
         int tupleCount = frameTupleAppender.getTupleCount();
-        if (LOGGER.isLoggable(Level.FINEST)) {
-            LOGGER.finest("appendTuple(): tuple count: " + tupleCount);
+        if (LOGGER.isTraceEnabled()) {
+            LOGGER.trace("appendTuple(): tuple count: " + tupleCount);
         }
         return tupleCount;
     }
 
     public boolean appendTuple() throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.FINEST)) {
-            LOGGER.finest("appendTuple(): tuple size: " + count);
+        if (LOGGER.isTraceEnabled()) {
+            LOGGER.trace("appendTuple(): tuple size: " + count);
         }
         boolean appended = frameTupleAppender.append(buf, 0, count);
         count = 0;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/SerializingDataWriter.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/SerializingDataWriter.java
index d9a4c7c..f7b5e3b 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/SerializingDataWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/SerializingDataWriter.java
@@ -18,9 +18,6 @@
  */
 package org.apache.hyracks.dataflow.common.comm.io;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.comm.VSizeFrame;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -28,9 +25,11 @@
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class SerializingDataWriter implements IOpenableDataWriter<Object[]> {
-    private static final Logger LOGGER = Logger.getLogger(SerializingDataWriter.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final ArrayTupleBuilder tb;
 
@@ -82,8 +81,8 @@
         tb.reset();
         for (int i = 0; i < data.length; ++i) {
             Object instance = data[i];
-            if (LOGGER.isLoggable(Level.FINEST)) {
-                LOGGER.finest(i + " " + instance);
+            if (LOGGER.isTraceEnabled()) {
+                LOGGER.trace(i + " " + instance);
             }
             tb.addField(recordDescriptor.getFields()[i], instance);
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
index a882c1c..fb16cba 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
@@ -90,5 +90,9 @@
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-databind</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/VariableDeletableTupleMemoryManager.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/VariableDeletableTupleMemoryManager.java
index 5a59b5d..6c67ecc 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/VariableDeletableTupleMemoryManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/VariableDeletableTupleMemoryManager.java
@@ -21,8 +21,6 @@
 
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameTupleAccessor;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
@@ -30,13 +28,15 @@
 import org.apache.hyracks.dataflow.std.sort.util.DeletableFrameTupleAppender;
 import org.apache.hyracks.dataflow.std.sort.util.IAppendDeletableFrameTupleAccessor;
 import org.apache.hyracks.dataflow.std.structures.TuplePointer;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 /**
  * Enable the delete record operation in the memory management. This is only used in the {@link org.apache.hyracks.dataflow.std.sort.HeapSortRunGenerator}
  */
 public class VariableDeletableTupleMemoryManager implements IDeletableTupleBufferManager {
 
-    private static final Logger LOG = Logger.getLogger(VariableDeletableTupleMemoryManager.class.getName());
+    private static final Logger LOG = LogManager.getLogger();
 
     private final int minFreeSpace;
     private final IFramePool pool;
@@ -161,8 +161,8 @@
         policy.reset();
         frames.clear();
         numTuples = 0;
-        if (LOG.isLoggable(Level.FINE)) {
-            LOG.fine("VariableTupleMemoryManager has reorganized " + statsReOrg + " times");
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("VariableTupleMemoryManager has reorganized " + statsReOrg + " times");
         }
         statsReOrg = 0;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/NonDeterministicChannelReader.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/NonDeterministicChannelReader.java
index d7d5c27..9676c9c 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/NonDeterministicChannelReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/NonDeterministicChannelReader.java
@@ -20,16 +20,16 @@
 
 import java.nio.ByteBuffer;
 import java.util.BitSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.channels.IInputChannel;
 import org.apache.hyracks.api.channels.IInputChannelMonitor;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.partitions.PartitionId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class NonDeterministicChannelReader implements IInputChannelMonitor, IPartitionAcceptor {
-    private static final Logger LOGGER = Logger.getLogger(NonDeterministicChannelReader.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final int nSenderPartitions;
 
@@ -143,8 +143,8 @@
     public synchronized void notifyFailure(IInputChannel channel) {
         PartitionId pid = (PartitionId) channel.getAttachment();
         int senderIndex = pid.getSenderIndex();
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine("Failure: " + pid.getConnectorDescriptorId() + " sender: " + senderIndex + " receiver: "
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug("Failure: " + pid.getConnectorDescriptorId() + " sender: " + senderIndex + " receiver: "
                     + pid.getReceiverIndex());
         }
         failSenders.set(senderIndex);
@@ -156,8 +156,8 @@
     public synchronized void notifyDataAvailability(IInputChannel channel, int nFrames) {
         PartitionId pid = (PartitionId) channel.getAttachment();
         int senderIndex = pid.getSenderIndex();
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine("Data available: " + pid.getConnectorDescriptorId() + " sender: " + senderIndex + " receiver: "
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug("Data available: " + pid.getConnectorDescriptorId() + " sender: " + senderIndex + " receiver: "
                     + pid.getReceiverIndex());
         }
         availableFrameCounts[senderIndex] += nFrames;
@@ -169,8 +169,8 @@
     public synchronized void notifyEndOfStream(IInputChannel channel) {
         PartitionId pid = (PartitionId) channel.getAttachment();
         int senderIndex = pid.getSenderIndex();
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine("EOS: " + pid);
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug("EOS: " + pid);
         }
         eosSenders.set(senderIndex);
         notifyAll();
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
index 43b9685..40f02f9 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
@@ -20,8 +20,6 @@
 package org.apache.hyracks.dataflow.std.group;
 
 import java.util.BitSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameTupleAccessor;
 import org.apache.hyracks.api.comm.IFrameWriter;
@@ -48,10 +46,12 @@
 import org.apache.hyracks.dataflow.std.structures.SerializableHashTable;
 import org.apache.hyracks.dataflow.std.structures.TuplePointer;
 import org.apache.hyracks.dataflow.std.util.FrameTuplePairComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class HashSpillableTableFactory implements ISpillableTableFactory {
 
-    private static Logger LOGGER = Logger.getLogger(HashSpillableTableFactory.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private static final double FUDGE_FACTOR = 1.1;
     private static final long serialVersionUID = 1L;
     private final IBinaryHashFunctionFamily[] hashFunctionFamilies;
@@ -109,8 +109,8 @@
 
         final int numPartitions = getNumOfPartitions(inputDataBytesSize / ctx.getInitialFrameSize(), memoryBudget);
         final int entriesPerPartition = (int) Math.ceil(1.0 * tableSize / numPartitions);
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine(
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug(
                     "created hashtable, table size:" + tableSize + " file size:" + inputDataBytesSize + "  #partitions:"
                     + numPartitions);
         }
@@ -159,8 +159,8 @@
                 if (hashTableForTuplePointer.isGarbageCollectionNeeded()) {
                     int numberOfFramesReclaimed = hashTableForTuplePointer.collectGarbage(bufferAccessor,
                             tpcIntermediate);
-                    if (LOGGER.isLoggable(Level.FINE)) {
-                        LOGGER.fine("Garbage Collection on Hash table is done. Deallocated frames:"
+                    if (LOGGER.isDebugEnabled()) {
+                        LOGGER.debug("Garbage Collection on Hash table is done. Deallocated frames:"
                                 + numberOfFramesReclaimed);
                     }
                 }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
index d7b76ce..7e6e147 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
@@ -19,8 +19,6 @@
 package org.apache.hyracks.dataflow.std.group.external;
 
 import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
@@ -35,11 +33,13 @@
 import org.apache.hyracks.dataflow.std.group.IAggregatorDescriptorFactory;
 import org.apache.hyracks.dataflow.std.group.ISpillableTable;
 import org.apache.hyracks.dataflow.std.group.ISpillableTableFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ExternalGroupBuildOperatorNodePushable extends AbstractUnaryInputSinkOperatorNodePushable
         implements IRunFileWriterGenerator {
 
-    private static Logger LOGGER = Logger.getLogger("ExternalGroupBuildPhase");
+    private static final Logger LOGGER = LogManager.getLogger();
     private final IHyracksTaskContext ctx;
     private final Object stateId;
     private final int[] keyFields;
@@ -115,7 +115,7 @@
         } else {
             externalGroupBy.flushSpilledPartitions();
             ctx.setStateObject(state);
-            if (LOGGER.isLoggable(Level.FINE)) {
+            if (LOGGER.isDebugEnabled()) {
                 int numOfPartition = state.getSpillableTable().getNumPartitions();
                 int numOfSpilledPart = 0;
                 for (int i = 0; i < numOfPartition; i++) {
@@ -123,7 +123,7 @@
                         numOfSpilledPart++;
                     }
                 }
-                LOGGER.fine("level 0:" + "build with " + numOfPartition + " partitions" + ", spilled "
+                LOGGER.debug("level 0:" + "build with " + numOfPartition + " partitions" + ", spilled "
                         + numOfSpilledPart + " partitions");
             }
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
index 4d368bd..fb88775 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
@@ -19,8 +19,6 @@
 package org.apache.hyracks.dataflow.std.group.external;
 
 import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.comm.VSizeFrame;
@@ -39,10 +37,12 @@
 import org.apache.hyracks.dataflow.std.group.IAggregatorDescriptorFactory;
 import org.apache.hyracks.dataflow.std.group.ISpillableTable;
 import org.apache.hyracks.dataflow.std.group.ISpillableTableFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ExternalGroupWriteOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable
         implements IRunFileWriterGenerator {
-    private static Logger LOGGER = Logger.getLogger("ExternalGroupbyWrite");
+    private static final Logger LOGGER = LogManager.getLogger();
     private final IHyracksTaskContext ctx;
     private final Object stateId;
     private final ISpillableTableFactory spillableTableFactory;
@@ -138,14 +138,14 @@
                     }
                 }
 
-                if (LOGGER.isLoggable(Level.FINE)) {
+                if (LOGGER.isDebugEnabled()) {
                     int numOfSpilledPart = 0;
                     for (int x = 0; x < numOfTuples.length; x++) {
                         if (numOfTuples[x] > 0) {
                             numOfSpilledPart++;
                         }
                     }
-                    LOGGER.fine("level " + level + ":" + "build with " + numOfTuples.length + " partitions"
+                    LOGGER.debug("level " + level + ":" + "build with " + numOfTuples.length + " partitions"
                             + ", spilled " + numOfSpilledPart + " partitions");
                 }
                 doPass(partitionTable, runFileWriters, sizeInTuplesNextLevel, writer, level + 1);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
index 7f34d13..a51b780 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
@@ -22,8 +22,6 @@
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameTupleAccessor;
 import org.apache.hyracks.api.comm.IFrameWriter;
@@ -43,6 +41,8 @@
 import org.apache.hyracks.dataflow.std.structures.ISerializableTable;
 import org.apache.hyracks.dataflow.std.structures.TuplePointer;
 import org.apache.hyracks.dataflow.std.util.FrameTuplePairComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class InMemoryHashJoin {
 
@@ -64,7 +64,7 @@
     ISimpleFrameBufferManager bufferManager;
     private final boolean isTableCapacityNotZero;
 
-    private static final Logger LOGGER = Logger.getLogger(InMemoryHashJoin.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe,
             ITuplePartitionComputer tpcProbe, FrameTupleAccessor accessorBuild, RecordDescriptor rDBuild,
@@ -112,7 +112,7 @@
         } else {
             isTableCapacityNotZero = false;
         }
-        LOGGER.fine("InMemoryHashJoin has been created for a table size of " + table.getTableSize() + " for Thread ID "
+        LOGGER.debug("InMemoryHashJoin has been created for a table size of " + table.getTableSize() + " for Thread ID "
                 + Thread.currentThread().getId() + ".");
     }
 
@@ -207,8 +207,8 @@
             }
         }
         buffers.clear();
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine("InMemoryHashJoin has finished using " + nFrames + " frames for Thread ID "
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug("InMemoryHashJoin has finished using " + nFrames + " frames for Thread ID "
                     + Thread.currentThread().getId() + ".");
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
index c795144..8dbe9b0 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
@@ -23,8 +23,6 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.BitSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.VSizeFrame;
@@ -66,6 +64,8 @@
 import org.apache.hyracks.dataflow.std.structures.ISerializableTable;
 import org.apache.hyracks.dataflow.std.structures.SerializableHashTable;
 import org.apache.hyracks.dataflow.std.util.FrameTuplePairComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 /**
  * @author pouria
@@ -137,7 +137,7 @@
     private boolean forceNLJ = false;
     private boolean forceRoleReversal = false;
 
-    private static final Logger LOGGER = Logger.getLogger(OptimizedHybridHashJoinOperatorDescriptor.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     public OptimizedHybridHashJoinOperatorDescriptor(IOperatorDescriptorRegistry spec, int memSizeInFrames,
             int inputsize0, double factor, int[] keys0, int[] keys1,
@@ -294,8 +294,8 @@
                             buildHpc, predEvaluator, isLeftOuter, nonMatchWriterFactories);
 
                     state.hybridHJ.initBuild();
-                    if (LOGGER.isLoggable(Level.FINE)) {
-                        LOGGER.fine("OptimizedHybridHashJoin is starting the build phase with " + state.numOfPartitions
+                    if (LOGGER.isDebugEnabled()) {
+                        LOGGER.debug("OptimizedHybridHashJoin is starting the build phase with " + state.numOfPartitions
                                 + " partitions using " + state.memForJoin + " frames for memory.");
                     }
                 }
@@ -312,8 +312,8 @@
                         state.hybridHJ.clearBuildTempFiles();
                     } else {
                         ctx.setStateObject(state);
-                        if (LOGGER.isLoggable(Level.FINE)) {
-                            LOGGER.fine("OptimizedHybridHashJoin closed its build phase");
+                        if (LOGGER.isDebugEnabled()) {
+                            LOGGER.debug("OptimizedHybridHashJoin closed its build phase");
                         }
                     }
                 }
@@ -395,8 +395,8 @@
                     writer.open();
                     state.hybridHJ.initProbe();
 
-                    if (LOGGER.isLoggable(Level.FINE)) {
-                        LOGGER.fine("OptimizedHybridHashJoin is starting the probe phase.");
+                    if (LOGGER.isDebugEnabled()) {
+                        LOGGER.debug("OptimizedHybridHashJoin is starting the probe phase.");
                     }
                 }
 
@@ -466,8 +466,8 @@
                 }
 
                 private void logProbeComplete() {
-                    if (LOGGER.isLoggable(Level.FINE)) {
-                        LOGGER.fine("OptimizedHybridHashJoin closed its probe phase");
+                    if (LOGGER.isDebugEnabled()) {
+                        LOGGER.debug("OptimizedHybridHashJoin closed its probe phase");
                     }
                 }
 
@@ -484,8 +484,8 @@
                     long probePartSize = (long) Math.ceil((double) probeSideReader.getFileSize() / (double) frameSize);
                     int beforeMax = Math.max(buildSizeInTuple, probeSizeInTuple);
 
-                    if (LOGGER.isLoggable(Level.FINE)) {
-                        LOGGER.fine("\n>>>Joining Partition Pairs (thread_id " + Thread.currentThread().getId()
+                    if (LOGGER.isDebugEnabled()) {
+                        LOGGER.debug("\n>>>Joining Partition Pairs (thread_id " + Thread.currentThread().getId()
                                 + ") (pid " + ") - (level " + level + ")" + " - BuildSize:\t" + buildPartSize
                                 + "\tProbeSize:\t" + probePartSize + " - MemForJoin " + (state.memForJoin)
                                 + "  - LeftOuter is " + isLeftOuter);
@@ -505,8 +505,8 @@
                         int tabSize = -1;
                         if (!forceRoleReversal && (isLeftOuter || (buildPartSize < probePartSize))) {
                             //Case 1.1 - InMemHJ (without Role-Reversal)
-                            if (LOGGER.isLoggable(Level.FINE)) {
-                                LOGGER.fine("\t>>>Case 1.1 (IsLeftOuter || buildSize<probe) AND ApplyInMemHJ - [Level "
+                            if (LOGGER.isDebugEnabled()) {
+                                LOGGER.debug("\t>>>Case 1.1 (IsLeftOuter || buildSize<probe) AND ApplyInMemHJ - [Level "
                                         + level + "]");
                             }
                             tabSize = buildSizeInTuple;
@@ -518,8 +518,8 @@
                             applyInMemHashJoin(buildKeys, probeKeys, tabSize, buildRd, probeRd, buildHpc, probeHpc,
                                     buildSideReader, probeSideReader); // checked-confirmed
                         } else { //Case 1.2 - InMemHJ with Role Reversal
-                            if (LOGGER.isLoggable(Level.FINE)) {
-                                LOGGER.fine("\t>>>Case 1.2. (NoIsLeftOuter || probe<build) AND ApplyInMemHJ"
+                            if (LOGGER.isDebugEnabled()) {
+                                LOGGER.debug("\t>>>Case 1.2. (NoIsLeftOuter || probe<build) AND ApplyInMemHJ"
                                         + "WITH RoleReversal - [Level " + level + "]");
                             }
                             tabSize = probeSizeInTuple;
@@ -534,22 +534,23 @@
                     }
                     //Apply (Recursive) HHJ
                     else {
-                        if (LOGGER.isLoggable(Level.FINE)) {
-                            LOGGER.fine("\t>>>Case 2. ApplyRecursiveHHJ - [Level " + level + "]");
+                        if (LOGGER.isDebugEnabled()) {
+                            LOGGER.debug("\t>>>Case 2. ApplyRecursiveHHJ - [Level " + level + "]");
                         }
                         if (!forceRoleReversal && (isLeftOuter || buildPartSize < probePartSize)) {
                             //Case 2.1 - Recursive HHJ (without Role-Reversal)
-                            if (LOGGER.isLoggable(Level.FINE)) {
-                                LOGGER.fine("\t\t>>>Case 2.1 - RecursiveHHJ WITH (isLeftOuter || build<probe) - [Level "
-                                        + level + "]");
+                            if (LOGGER.isDebugEnabled()) {
+                                LOGGER.debug(
+                                        "\t\t>>>Case 2.1 - RecursiveHHJ WITH (isLeftOuter || build<probe) - [Level "
+                                                + level + "]");
                             }
                             applyHybridHashJoin((int) buildPartSize, PROBE_REL, BUILD_REL, probeKeys, buildKeys,
                                     probeRd, buildRd, probeHpc, buildHpc, probeSideReader, buildSideReader, level,
                                     beforeMax);
 
                         } else { //Case 2.2 - Recursive HHJ (with Role-Reversal)
-                            if (LOGGER.isLoggable(Level.FINE)) {
-                                LOGGER.fine(
+                            if (LOGGER.isDebugEnabled()) {
+                                LOGGER.debug(
                                         "\t\t>>>Case 2.2. - RecursiveHHJ WITH RoleReversal - [Level " + level + "]");
                             }
 
@@ -618,8 +619,8 @@
                         BitSet rPStatus = rHHj.getPartitionStatus();
                         if (!forceNLJ && (afterMax < (NLJ_SWITCH_THRESHOLD * beforeMax))) {
                             //Case 2.1.1 - Keep applying HHJ
-                            if (LOGGER.isLoggable(Level.FINE)) {
-                                LOGGER.fine("\t\t>>>Case 2.1.1 - KEEP APPLYING RecursiveHHJ WITH "
+                            if (LOGGER.isDebugEnabled()) {
+                                LOGGER.debug("\t\t>>>Case 2.1.1 - KEEP APPLYING RecursiveHHJ WITH "
                                         + "(isLeftOuter || build<probe) - [Level " + level + "]");
                             }
                             for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
@@ -644,8 +645,8 @@
                             }
 
                         } else { //Case 2.1.2 - Switch to NLJ
-                            if (LOGGER.isLoggable(Level.FINE)) {
-                                LOGGER.fine(
+                            if (LOGGER.isDebugEnabled()) {
+                                LOGGER.debug(
                                         "\t\t>>>Case 2.1.2 - SWITCHED to NLJ RecursiveHHJ WITH "
                                                 + "(isLeftOuter || build<probe) - [Level " + level + "]");
                             }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractExternalSortRunMerger.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractExternalSortRunMerger.java
index c8f9268..4a77b3c 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractExternalSortRunMerger.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractExternalSortRunMerger.java
@@ -22,8 +22,6 @@
 import java.util.BitSet;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.FrameConstants;
 import org.apache.hyracks.api.comm.IFrameWriter;
@@ -38,6 +36,9 @@
 import org.apache.hyracks.dataflow.common.io.RunFileReader;
 import org.apache.hyracks.dataflow.common.io.RunFileWriter;
 import org.apache.hyracks.dataflow.std.sort.util.GroupVSizeFrame;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public abstract class AbstractExternalSortRunMerger {
 
@@ -55,7 +56,7 @@
     private VSizeFrame outputFrame;
     private ISorter sorter;
 
-    private static final Logger LOGGER = Logger.getLogger(AbstractExternalSortRunMerger.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     public AbstractExternalSortRunMerger(IHyracksTaskContext ctx, ISorter sorter, List<GeneratedRunFileReader> runs,
             IBinaryComparator[] comparators, INormalizedKeyComputer nmkComputer, RecordDescriptor recordDesc,
@@ -147,8 +148,8 @@
 
                         if (currentGenerationRunAvailable.isEmpty()) {
 
-                            if (LOGGER.isLoggable(Level.FINE)) {
-                                LOGGER.fine("generated runs:" + stop);
+                            if (LOGGER.isDebugEnabled()) {
+                                LOGGER.debug("generated runs:" + stop);
                             }
                             runs.subList(0, stop).clear();
                             currentGenerationRunAvailable.clear();
@@ -156,9 +157,7 @@
                             stop = runs.size();
                         }
                     } else {
-                        if (LOGGER.isLoggable(Level.FINE)) {
-                            LOGGER.fine("final runs:" + stop);
-                        }
+                        LOGGER.debug("final runs: {}", stop);
                         merge(finalWriter, partialRuns);
                         break;
                     }
@@ -179,8 +178,8 @@
                     try {
                         reader.close(); // close is idempotent.
                     } catch (Exception e) {
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.log(Level.WARNING, e.getMessage(), e);
+                        if (LOGGER.isWarnEnabled()) {
+                            LOGGER.log(Level.WARN, e.getMessage(), e);
                         }
                     }
                 }
@@ -259,8 +258,8 @@
             }
         } finally {
             merger.close();
-            if (LOGGER.isLoggable(Level.FINE)) {
-                LOGGER.fine("Output " + io + " frames");
+            if (LOGGER.isDebugEnabled()) {
+                LOGGER.debug("Output " + io + " frames");
             }
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractFrameSorter.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractFrameSorter.java
index 6c061ae..eead09e 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractFrameSorter.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractFrameSorter.java
@@ -20,8 +20,6 @@
 package org.apache.hyracks.dataflow.std.sort;
 
 import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.IFrameTupleAppender;
@@ -41,10 +39,12 @@
 import org.apache.hyracks.dataflow.std.buffermanager.IFrameBufferManager;
 import org.apache.hyracks.dataflow.std.buffermanager.VariableFramePool;
 import org.apache.hyracks.util.IntSerDeUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public abstract class AbstractFrameSorter implements IFrameSorter {
 
-    protected Logger LOGGER = Logger.getLogger(AbstractFrameSorter.class.getName());
+    protected Logger LOGGER = LogManager.getLogger();
     protected static final int ID_FRAME_ID = 0;
     protected static final int ID_TUPLE_START = 1;
     protected static final int ID_TUPLE_END = 2;
@@ -233,8 +233,8 @@
         }
         maxFrameSize = Math.max(maxFrameSize, outputFrame.getFrameSize());
         outputAppender.write(writer, true);
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine(
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug(
                     "Flushed records:" + limit + " out of " + tupleCount + "; Flushed through " + (io + 1) + " frames");
         }
         return maxFrameSize;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java
index 602157f..f274ca1 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java
@@ -21,8 +21,6 @@
 
 import java.nio.ByteBuffer;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -45,10 +43,12 @@
 import org.apache.hyracks.dataflow.std.base.AbstractStateObject;
 import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
 import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public abstract class AbstractSorterOperatorDescriptor extends AbstractOperatorDescriptor {
 
-    private static final Logger LOGGER = Logger.getLogger(AbstractSorterOperatorDescriptor.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private static final long serialVersionUID = 1L;
 
@@ -132,7 +132,7 @@
                     runGen.close();
                     state.generatedRunFileReaders = runGen.getRuns();
                     state.sorter = runGen.getSorter();
-                    if (LOGGER.isLoggable(Level.INFO)) {
+                    if (LOGGER.isInfoEnabled()) {
                         LOGGER.info("InitialNumberOfRuns:" + runGen.getRuns().size());
                     }
                     ctx.setStateObject(state);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
index 80b36ce..180ecbc 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
@@ -20,8 +20,6 @@
 package org.apache.hyracks.dataflow.std.sort;
 
 import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -34,9 +32,11 @@
 import org.apache.hyracks.dataflow.std.buffermanager.FrameFreeSlotPolicyFactory;
 import org.apache.hyracks.dataflow.std.buffermanager.VariableFrameMemoryManager;
 import org.apache.hyracks.dataflow.std.buffermanager.VariableFramePool;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class HybridTopKSortRunGenerator extends HeapSortRunGenerator {
-    private static final Logger LOG = Logger.getLogger(HybridTopKSortRunGenerator.class.getName());
+    private static final Logger LOG = LogManager.getLogger();
 
     private static final int SWITCH_TO_FRAME_SORTER_THRESHOLD = 2;
     private IFrameSorter frameSorter = null;
@@ -90,8 +90,8 @@
                     }
                     tupleSorter.close();
                     tupleSorter = null;
-                    if (LOG.isLoggable(Level.FINE)) {
-                        LOG.fine("clear tupleSorter");
+                    if (LOG.isDebugEnabled()) {
+                        LOG.debug("clear tupleSorter");
                     }
                 }
             }
@@ -103,8 +103,8 @@
                                 frameLimit - 1));
                 frameSorter = new FrameSorterMergeSort(ctx, bufferManager, frameLimit - 1, sortFields, nmkFactories,
                         comparatorFactories, recordDescriptor, topK);
-                if (LOG.isLoggable(Level.FINE)) {
-                    LOG.fine("create frameSorter");
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("create frameSorter");
                 }
             }
             if (!frameSorter.insertFrame(buffer)) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TupleSorterHeapSort.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TupleSorterHeapSort.java
index 980857a..b02f859 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TupleSorterHeapSort.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TupleSorterHeapSort.java
@@ -21,8 +21,6 @@
 
 import java.util.Arrays;
 import java.util.Comparator;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.IFrameTupleAccessor;
@@ -42,10 +40,12 @@
 import org.apache.hyracks.dataflow.std.structures.IResetableComparableFactory;
 import org.apache.hyracks.dataflow.std.structures.MaxHeap;
 import org.apache.hyracks.dataflow.std.structures.TuplePointer;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class TupleSorterHeapSort implements ITupleSorter {
 
-    private static final Logger LOGGER = Logger.getLogger(TupleSorterHeapSort.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     class HeapEntryFactory implements IResetableComparableFactory<HeapEntry> {
         @Override
@@ -304,7 +304,7 @@
         }
         maxFrameSize = Math.max(maxFrameSize, outputFrame.getFrameSize());
         outputAppender.write(writer, true);
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Flushed records:" + numEntries + "; Flushed through " + (io + 1) + " frames");
         }
         return maxFrameSize;
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
index d894d9b..7472aa8 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
@@ -179,5 +179,9 @@
       <artifactId>hyracks-util</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
index 8696f8b..7a675bc 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
@@ -24,8 +24,6 @@
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.IFrameReader;
@@ -44,10 +42,12 @@
 import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import org.apache.hyracks.test.support.TestUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.Test;
 
 public class SerializationDeserializationTest {
-    private static final Logger LOGGER = Logger.getLogger(SerializationDeserializationTest.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private static final String DBLP_FILE = "data" + File.separator + "device1" + File.separator + "data"
             + File.separator + "dblp.txt";
 
@@ -139,7 +139,7 @@
         reader.open();
         Object[] arr;
         while ((arr = reader.readData()) != null) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info(arr[0] + " " + arr[1]);
             }
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
index 82fd737..ae27ac9 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
@@ -30,8 +30,6 @@
 import java.util.EnumSet;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hyracks.api.client.HyracksConnection;
@@ -55,12 +53,14 @@
 import org.apache.hyracks.control.nc.resources.memory.FrameManager;
 import org.apache.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
 import org.apache.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 
 public abstract class AbstractIntegrationTest {
-    private static final Logger LOGGER = Logger.getLogger(AbstractIntegrationTest.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     public static final String NC1_ID = "nc1";
     public static final String NC2_ID = "nc2";
@@ -121,7 +121,7 @@
         nc2.start();
 
         hcc = new HyracksConnection(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort());
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
         }
     }
@@ -134,11 +134,11 @@
     }
 
     protected JobId executeTest(JobSpecification spec) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(spec.toJSON().asText());
         }
         JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(jobId.toString());
         }
         return jobId;
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
index cc46a7d..a455cc9 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
@@ -23,8 +23,6 @@
 import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hyracks.api.client.HyracksConnection;
@@ -50,6 +48,8 @@
 import org.apache.hyracks.control.nc.resources.memory.FrameManager;
 import org.apache.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
 import org.apache.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
@@ -58,7 +58,7 @@
 
 public abstract class AbstractMultiNCIntegrationTest {
 
-    private static final Logger LOGGER = Logger.getLogger(AbstractMultiNCIntegrationTest.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private static final TestJobLifecycleListener jobLifecycleListener = new TestJobLifecycleListener();
 
     public static final String[] ASTERIX_IDS =
@@ -113,7 +113,7 @@
         }
 
         hcc = new HyracksConnection(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort());
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
         }
     }
@@ -144,11 +144,11 @@
     }
 
     protected void runTest(JobSpecification spec, String expectedErrorMessage) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(spec.toJSON().asText());
         }
         JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(jobId.toString());
         }
 
@@ -218,7 +218,7 @@
     }
 
     private void dumpOutputFiles() {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             for (File f : outputFiles) {
                 if (f.exists() && f.isFile()) {
                     try {
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java
index dd4fdd1..553c5b5 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java
@@ -25,8 +25,6 @@
 import java.io.File;
 import java.lang.reflect.Field;
 import java.util.HashMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hyracks.api.client.HyracksConnection;
@@ -40,6 +38,8 @@
 import org.apache.hyracks.control.common.controllers.CCConfig;
 import org.apache.hyracks.control.common.controllers.NCConfig;
 import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -47,7 +47,7 @@
 import org.mockito.Mockito;
 
 public class DeployedJobSpecsTest {
-    private static final Logger LOGGER = Logger.getLogger(DeployedJobSpecsTest.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private static final String NC1_ID = "nc1";
     private static final String NC2_ID = "nc2";
@@ -111,7 +111,7 @@
         nc2.start();
 
         hcc = new HyracksConnection(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort());
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TestJobLifecycleListener.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TestJobLifecycleListener.java
index c8d0b9c..008be29 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TestJobLifecycleListener.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TestJobLifecycleListener.java
@@ -24,18 +24,19 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.job.IJobLifecycleListener;
 import org.apache.hyracks.api.job.JobId;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.api.job.JobStatus;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class TestJobLifecycleListener implements IJobLifecycleListener {
 
-    private static final Logger LOGGER = Logger.getLogger(TestJobLifecycleListener.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final Map<JobId, JobSpecification> created = new HashMap<>();
     private final Set<JobId> started = new HashSet<>();
     private final Set<JobId> finished = new HashSet<>();
@@ -48,7 +49,7 @@
     @Override
     public void notifyJobCreation(JobId jobId, JobSpecification spec) throws HyracksException {
         if (created.containsKey(jobId)) {
-            LOGGER.log(Level.WARNING, "Job " + jobId + "has been created before");
+            LOGGER.log(Level.WARN, "Job " + jobId + "has been created before");
             increment(doubleCreated, jobId);
         }
         created.put(jobId, spec);
@@ -63,11 +64,11 @@
     @Override
     public void notifyJobStart(JobId jobId) throws HyracksException {
         if (!created.containsKey(jobId)) {
-            LOGGER.log(Level.WARNING, "Job " + jobId + "has not been created");
+            LOGGER.log(Level.WARN, "Job " + jobId + "has not been created");
             startWithoutCreate.add(jobId);
         }
         if (started.contains(jobId)) {
-            LOGGER.log(Level.WARNING, "Job " + jobId + "has been started before");
+            LOGGER.log(Level.WARN, "Job " + jobId + "has been started before");
             increment(doubleStarted, jobId);
         }
         started.add(jobId);
@@ -76,43 +77,43 @@
     @Override
     public void notifyJobFinish(JobId jobId, JobStatus jobStatus, List<Exception> exceptions) throws HyracksException {
         if (!started.contains(jobId)) {
-            LOGGER.log(Level.WARNING, "Job " + jobId + "has not been started");
+            LOGGER.log(Level.WARN, "Job " + jobId + "has not been started");
             finishWithoutStart.add(jobId);
         }
         if (finished.contains(jobId)) {
             // TODO: job finish should be called once only when it has really completed
             // throw new HyracksDataException("Job " + jobId + "has been finished before");
-            LOGGER.log(Level.WARNING, "Dangerous: Duplicate Job: " + jobId + " has finished with status: " + jobStatus);
+            LOGGER.log(Level.WARN, "Dangerous: Duplicate Job: " + jobId + " has finished with status: " + jobStatus);
             increment(doubleFinished, jobId);
         }
         finished.add(jobId);
     }
 
     public void check() throws Exception {
-        LOGGER.log(Level.WARNING, "Checking all created jobs have started");
+        LOGGER.log(Level.WARN, "Checking all created jobs have started");
         for (JobId jobId : created.keySet()) {
             if (!started.contains(jobId)) {
-                LOGGER.log(Level.WARNING, "JobId " + jobId + " has been created but never started");
+                LOGGER.log(Level.WARN, "JobId " + jobId + " has been created but never started");
             }
         }
-        LOGGER.log(Level.WARNING, "Checking all started jobs have terminated");
+        LOGGER.log(Level.WARN, "Checking all started jobs have terminated");
         for (JobId jobId : started) {
             if (!finished.contains(jobId)) {
-                LOGGER.log(Level.WARNING, "JobId " + jobId + " has started but not finished");
+                LOGGER.log(Level.WARN, "JobId " + jobId + " has started but not finished");
             }
         }
-        LOGGER.log(Level.WARNING, "Checking multiple creates");
+        LOGGER.log(Level.WARN, "Checking multiple creates");
         for (Entry<JobId, Integer> entry : doubleCreated.entrySet()) {
-            LOGGER.log(Level.WARNING, "job " + entry.getKey() + " has been created " + entry.getValue() + " times");
+            LOGGER.log(Level.WARN, "job " + entry.getKey() + " has been created " + entry.getValue() + " times");
         }
-        LOGGER.log(Level.WARNING, "Checking multiple starts");
+        LOGGER.log(Level.WARN, "Checking multiple starts");
         for (Entry<JobId, Integer> entry : doubleStarted.entrySet()) {
-            LOGGER.log(Level.WARNING, "job " + entry.getKey() + " has been started " + entry.getValue() + " times");
+            LOGGER.log(Level.WARN, "job " + entry.getKey() + " has been started " + entry.getValue() + " times");
         }
-        LOGGER.log(Level.WARNING, "Checking multiple finishes");
+        LOGGER.log(Level.WARN, "Checking multiple finishes");
         for (Entry<JobId, Integer> entry : doubleFinished.entrySet()) {
-            LOGGER.log(Level.WARNING, "job " + entry.getKey() + " has been finished " + entry.getValue() + " times");
+            LOGGER.log(Level.WARN, "job " + entry.getKey() + " has been finished " + entry.getValue() + " times");
         }
-        LOGGER.log(Level.WARNING, "Done checking!");
+        LOGGER.log(Level.WARN, "Done checking!");
     }
 }
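
TestJobLifecycleListener keeps the explicit LOGGER.log(Level, ...) form, so the only change there is the Level constant itself. The mapping applied consistently in this patch, sketched with a hypothetical helper class (LevelMappingSketch and its illustrate() method are not part of the change):

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class LevelMappingSketch {
        private static final Logger LOGGER = LogManager.getLogger();

        void illustrate(Exception cause) {
            // java.util.logging.Level.FINE    -> Level.DEBUG
            // java.util.logging.Level.INFO    -> Level.INFO
            // java.util.logging.Level.WARNING -> Level.WARN
            // java.util.logging.Level.SEVERE  -> Level.ERROR
            LOGGER.log(Level.WARN, "job has been created before");
            LOGGER.log(Level.ERROR, "failure while checking job state", cause);

            // The convenience methods are equivalent to the explicit form above;
            // both styles appear elsewhere in this change.
            LOGGER.warn("job has been created before");
            LOGGER.error("failure while checking job state", cause);
        }
    }
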
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/WaitingOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/WaitingOperatorDescriptor.java
index 6503b7b..799520c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/WaitingOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/WaitingOperatorDescriptor.java
@@ -18,8 +18,6 @@
  */
 package org.apache.hyracks.tests.integration;
 
-import java.util.logging.Logger;
-
 import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -35,7 +33,6 @@
     public static final MutableBoolean CONTINUE_RUNNING = new MutableBoolean(false);
 
     private static final long serialVersionUID = 1L;
-    private static Logger LOGGER = Logger.getLogger(WaitingOperatorDescriptor.class.getName());
 
     public WaitingOperatorDescriptor(IOperatorDescriptorRegistry spec, int inputArity, int outputArity) {
         super(spec, inputArity, outputArity);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/ExceptionOnCreatePushRuntimeOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/ExceptionOnCreatePushRuntimeOperatorDescriptor.java
index d704671..10d6947 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/ExceptionOnCreatePushRuntimeOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/ExceptionOnCreatePushRuntimeOperatorDescriptor.java
@@ -20,8 +20,6 @@
 
 import java.nio.ByteBuffer;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -31,10 +29,13 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
 import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ExceptionOnCreatePushRuntimeOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
     private static final long serialVersionUID = 1L;
-    private static Logger LOGGER = Logger.getLogger(ExceptionOnCreatePushRuntimeOperatorDescriptor.class.getName());
+    private static Logger LOGGER = LogManager.getLogger();
     private static AtomicInteger createPushRuntime = new AtomicInteger();
     private static AtomicInteger initializeCounter = new AtomicInteger();
     private static AtomicInteger openCloseCounter = new AtomicInteger();
@@ -130,10 +131,10 @@
     public static boolean succeed() {
         boolean success = openCloseCounter.get() == 0 && createPushRuntime.get() == 0 && initializeCounter.get() == 0;
         if (!success) {
-            LOGGER.log(Level.SEVERE, "Failure:");
-            LOGGER.log(Level.SEVERE, "CreatePushRuntime:" + createPushRuntime.get());
-            LOGGER.log(Level.SEVERE, "InitializeCounter:" + initializeCounter.get());
-            LOGGER.log(Level.SEVERE, "OpenCloseCounter:" + openCloseCounter.get());
+            LOGGER.log(Level.ERROR, "Failure:");
+            LOGGER.log(Level.ERROR, "CreatePushRuntime:" + createPushRuntime.get());
+            LOGGER.log(Level.ERROR, "InitializeCounter:" + initializeCounter.get());
+            LOGGER.log(Level.ERROR, "OpenCloseCounter:" + openCloseCounter.get());
         }
         return success;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
index 478650f..1514c8e 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
@@ -209,5 +209,9 @@
       <artifactId>hyracks-control-cc</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java
index c1083c9f..6f4d8b1 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java
@@ -19,8 +19,9 @@
 package org.apache.hyracks.examples.shutdown.test;
 
 import java.net.ServerSocket;
-import java.util.logging.Logger;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -30,7 +31,7 @@
 import org.apache.hyracks.ipc.exceptions.IPCException;
 
 public class ClusterShutdownIT {
-    private static Logger LOGGER = Logger.getLogger(ClusterShutdownIT.class.getName());
+    private static Logger LOGGER = LogManager.getLogger();
     @Rule
     public ExpectedException closeTwice = ExpectedException.none();
     @Test
@@ -49,7 +50,7 @@
             s = new ServerSocket(1099);
             //and we should be able to bind to this too
         } catch (Exception e) {
-            LOGGER.severe(e.getMessage());
+            LOGGER.error(e.getMessage());
             throw e;
         } finally {
             s.close();
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
index 678645c..e1c91c1 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
@@ -410,5 +410,9 @@
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
index 4937a15..c9bf547 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
@@ -25,7 +25,6 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.TreeMap;
-import java.util.logging.Logger;
 
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.mapred.InputSplit;
@@ -33,10 +32,12 @@
 import org.apache.hyracks.api.topology.ClusterTopology;
 import org.apache.hyracks.hdfs.api.INcCollection;
 import org.apache.hyracks.hdfs.api.INcCollectionBuilder;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 @SuppressWarnings("deprecation")
 public class RackAwareNcCollectionBuilder implements INcCollectionBuilder {
-    private static final Logger LOGGER = Logger.getLogger(RackAwareNcCollectionBuilder.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private ClusterTopology topology;
 
     public RackAwareNcCollectionBuilder(ClusterTopology topology) {
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
index f9b68bc..615f827 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
@@ -30,7 +30,6 @@
 import java.util.Map;
 import java.util.PriorityQueue;
 import java.util.Random;
-import java.util.logging.Logger;
 
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.mapred.InputSplit;
@@ -42,13 +41,15 @@
 import org.apache.hyracks.api.topology.ClusterTopology;
 import org.apache.hyracks.hdfs.api.INcCollection;
 import org.apache.hyracks.hdfs.api.INcCollectionBuilder;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 /**
  * The scheduler conduct data-local scheduling for data reading on HDFS. This
  * class works for Hadoop old API.
  */
 public class Scheduler {
-    private static final Logger LOGGER = Logger.getLogger(Scheduler.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     /** a list of NCs */
     private String[] NCs;
diff --git a/hyracks-fullstack/hyracks/hyracks-http/pom.xml b/hyracks-fullstack/hyracks/hyracks-http/pom.xml
index 7d0ddb2..099c429 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-http/pom.xml
@@ -75,5 +75,9 @@
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-databind</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/AbstractServlet.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/AbstractServlet.java
index d0637ca..21653c8 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/AbstractServlet.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/AbstractServlet.java
@@ -24,13 +24,14 @@
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.http.api.IServlet;
 import org.apache.hyracks.http.api.IServletRequest;
 import org.apache.hyracks.http.api.IServletResponse;
 import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.SerializationFeature;
@@ -40,7 +41,7 @@
 import io.netty.handler.codec.http.HttpResponseStatus;
 
 public abstract class AbstractServlet implements IServlet {
-    private static final Logger LOGGER = Logger.getLogger(AbstractServlet.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     protected static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
     static {
@@ -99,11 +100,11 @@
                 notAllowed(method, response);
             }
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Unhandled exception", e);
+            LOGGER.log(Level.WARN, "Unhandled exception", e);
             response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
         } catch (Throwable th) { //NOSONAR Just logging and then throwing again
             try {
-                LOGGER.log(Level.WARNING, "Unhandled throwable", th);
+                LOGGER.log(Level.WARN, "Unhandled throwable", th);
             } catch (Throwable loggingFailure) {// NOSONAR... swallow logging failure
             }
             throw th;
@@ -116,7 +117,7 @@
         if (message != null) {
             response.writer().println(message);
         }
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("sendError: status=" + status + ", message=" + message);
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedNettyOutputStream.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedNettyOutputStream.java
index e4f0777..d5f81e5 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedNettyOutputStream.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedNettyOutputStream.java
@@ -20,8 +20,10 @@
 
 import java.io.IOException;
 import java.io.OutputStream;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import io.netty.buffer.ByteBuf;
 import io.netty.channel.ChannelHandlerContext;
@@ -31,7 +33,7 @@
 
 public class ChunkedNettyOutputStream extends OutputStream {
 
-    private static final Logger LOGGER = Logger.getLogger(ChunkedNettyOutputStream.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final ChannelHandlerContext ctx;
     private final ChunkedResponse response;
     private ByteBuf buffer;
@@ -132,7 +134,7 @@
                 wait();
             } catch (InterruptedException e) {
                 Thread.currentThread().interrupt();
-                LOGGER.log(Level.WARNING, "Interupted while waiting for channel to be writable", e);
+                LOGGER.log(Level.WARN, "Interrupted while waiting for channel to be writable", e);
                 throw new IOException(e);
             }
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedResponse.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedResponse.java
index d8e9a9a..323a463 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedResponse.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedResponse.java
@@ -21,10 +21,11 @@
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.PrintWriter;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.http.api.IServletResponse;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import io.netty.buffer.ByteBuf;
 import io.netty.channel.ChannelFuture;
@@ -60,7 +61,7 @@
  */
 public class ChunkedResponse implements IServletResponse {
 
-    private static final Logger LOGGER = Logger.getLogger(ChunkedResponse.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final ChannelHandlerContext ctx;
     private final ChunkedNettyOutputStream outputStream;
     private final PrintWriter writer;
@@ -112,7 +113,7 @@
         } else {
             // There was an error
             if (headerSent) {
-                LOGGER.log(Level.WARNING, "Error after header write of chunked response");
+                LOGGER.log(Level.WARN, "Error after header write of chunked response");
                 if (error != null) {
                     error.release();
                 }
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java
index 13ed1fc..cb6ad0d 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java
@@ -18,8 +18,9 @@
  */
 package org.apache.hyracks.http.server;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import io.netty.channel.ChannelFutureListener;
 import io.netty.channel.ChannelHandlerContext;
@@ -40,7 +41,7 @@
  */
 public class HttpRequestCapacityController extends ChannelInboundHandlerAdapter {
 
-    private static final Logger LOGGER = Logger.getLogger(HttpRequestCapacityController.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final HttpServer server;
     private boolean overloaded = false;
 
@@ -74,7 +75,7 @@
             ctx.writeAndFlush(ctx.alloc().buffer(0), promise);
         } catch (Throwable th) {//NOSONAR
             try {
-                LOGGER.log(Level.SEVERE, "Failure during request rejection", th);
+                LOGGER.log(Level.ERROR, "Failure during request rejection", th);
             } catch (Throwable loggingFailure) {//NOSONAR
             }
             PromiseNotificationUtil.tryFailure(promise, th, null);
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestHandler.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestHandler.java
index 555f845..bf8e629 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestHandler.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestHandler.java
@@ -20,12 +20,13 @@
 
 import java.io.IOException;
 import java.util.concurrent.Callable;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.http.api.IServlet;
 import org.apache.hyracks.http.api.IServletRequest;
 import org.apache.hyracks.http.api.IServletResponse;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import io.netty.channel.ChannelFuture;
 import io.netty.channel.ChannelFutureListener;
@@ -34,7 +35,7 @@
 import io.netty.handler.codec.http.HttpUtil;
 
 public class HttpRequestHandler implements Callable<Void> {
-    private static final Logger LOGGER = Logger.getLogger(HttpRequestHandler.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final ChannelHandlerContext ctx;
     private final IServlet servlet;
     private final IServletRequest request;
@@ -57,7 +58,7 @@
                 lastContentFuture.addListener(ChannelFutureListener.CLOSE);
             }
         } catch (Throwable th) { //NOSONAR
-            LOGGER.log(Level.SEVERE, "Failure handling HTTP Request", th);
+            LOGGER.log(Level.ERROR, "Failure handling HTTP Request", th);
             ctx.close();
         } finally {
             request.getHttpRequest().release();
@@ -69,7 +70,7 @@
         try {
             servlet.handle(request, response);
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Failure during handling of an IServletRequest", e);
+            LOGGER.log(Level.WARN, "Failure during handling of an IServletRequest", e);
             response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
         } finally {
             response.close();
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServer.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServer.java
index 71ddbc0..19436ab 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServer.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServer.java
@@ -27,11 +27,12 @@
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.http.api.IServlet;
 import org.apache.hyracks.util.ThreadDumpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import io.netty.bootstrap.ServerBootstrap;
 import io.netty.buffer.PooledByteBufAllocator;
@@ -54,7 +55,7 @@
     protected static final int RECEIVE_BUFFER_SIZE = 4096;
     protected static final int DEFAULT_NUM_EXECUTOR_THREADS = 16;
     protected static final int DEFAULT_REQUEST_QUEUE_SIZE = 256;
-    private static final Logger LOGGER = Logger.getLogger(HttpServer.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private static final int FAILED = -1;
     private static final int STOPPED = 0;
     private static final int STARTING = 1;
@@ -111,7 +112,7 @@
                 doStart();
                 setStarted();
             } catch (Throwable e) { // NOSONAR
-                LOGGER.log(Level.SEVERE, "Failure starting an Http Server with port: " + port, e);
+                LOGGER.log(Level.ERROR, "Failure starting an Http Server with port: " + port, e);
                 setFailed(e);
                 throw e;
             }
@@ -128,7 +129,7 @@
                 doStop();
                 setStopped();
             } catch (Throwable e) { // NOSONAR
-                LOGGER.log(Level.SEVERE, "Failure stopping an Http Server", e);
+                LOGGER.log(Level.ERROR, "Failure stopping an Http Server", e);
                 setFailed(e);
                 throw e;
             }
@@ -229,15 +230,15 @@
             // wait 30s for interrupted requests to unwind
             executor.awaitTermination(30, TimeUnit.SECONDS);
             if (!executor.isTerminated()) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.log(Level.SEVERE,
+                if (LOGGER.isErrorEnabled()) {
+                    LOGGER.log(Level.ERROR,
                             "Failed to shutdown http server executor; thread dump: " + ThreadDumpUtil.takeDumpString());
                 } else {
-                    LOGGER.log(Level.SEVERE, "Failed to shutdown http server executor");
+                    LOGGER.log(Level.ERROR, "Failed to shutdown http server executor");
                 }
             }
         } catch (Exception e) {
-            LOGGER.log(Level.SEVERE, "Error while shutting down http server executor", e);
+            LOGGER.log(Level.ERROR, "Error while shutting down http server executor", e);
         }
         channel.close();
         channel.closeFuture().sync();
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServerHandler.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServerHandler.java
index 0984ebf..9290cdf 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServerHandler.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServerHandler.java
@@ -20,12 +20,13 @@
 
 import java.io.IOException;
 import java.util.concurrent.RejectedExecutionException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.http.api.IServlet;
 import org.apache.hyracks.http.api.IServletRequest;
 import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import io.netty.channel.ChannelFutureListener;
 import io.netty.channel.ChannelHandlerContext;
@@ -37,7 +38,7 @@
 
 public class HttpServerHandler<T extends HttpServer> extends SimpleChannelInboundHandler<Object> {
 
-    private static final Logger LOGGER = Logger.getLogger(HttpServerHandler.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     protected final T server;
     protected final int chunkSize;
     protected HttpRequestHandler handler;
@@ -71,7 +72,7 @@
                 submit(ctx, servlet, request);
             }
         } catch (Exception e) {
-            LOGGER.log(Level.SEVERE, "Failure Submitting HTTP Request", e);
+            LOGGER.log(Level.ERROR, "Failure Submitting HTTP Request", e);
             respond(ctx, request.protocolVersion(), new HttpResponseStatus(500, e.getMessage()));
         }
     }
@@ -86,7 +87,7 @@
         try {
             servletRequest = HttpUtil.toServletRequest(request);
         } catch (IllegalArgumentException e) {
-            LOGGER.log(Level.WARNING, "Failure Decoding Request", e);
+            LOGGER.log(Level.WARN, "Failure Decoding Request", e);
             respond(ctx, request.protocolVersion(), HttpResponseStatus.BAD_REQUEST);
             return;
         }
@@ -98,21 +99,21 @@
         try {
             server.getExecutor(handler).submit(handler);
         } catch (RejectedExecutionException e) { // NOSONAR
-            LOGGER.log(Level.WARNING, "Request rejected by server executor service. " + e.getMessage());
+            LOGGER.log(Level.WARN, "Request rejected by server executor service. " + e.getMessage());
             handler.reject();
         }
     }
 
     protected void handleServletNotFound(ChannelHandlerContext ctx, FullHttpRequest request) {
-        if (LOGGER.isLoggable(Level.WARNING)) {
-            LOGGER.warning("No servlet for " + request.uri());
+        if (LOGGER.isWarnEnabled()) {
+            LOGGER.warn("No servlet for " + request.uri());
         }
         respond(ctx, request.protocolVersion(), HttpResponseStatus.NOT_FOUND);
     }
 
     @Override
     public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
-        LOGGER.log(Level.SEVERE, "Failure handling HTTP Request", cause);
+        LOGGER.log(Level.ERROR, "Failure handling HTTP Request", cause);
         ctx.close();
     }
 }
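
The migrated handler still builds its messages by string concatenation inside an isWarnEnabled() guard. Log4j2 also accepts {} placeholder messages, which defer formatting until the level check has passed and so make the explicit guard unnecessary; a small sketch under that assumption (PlaceholderSketch, handleNotFound() and its parameters are hypothetical, not part of this patch):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class PlaceholderSketch {
        private static final Logger LOGGER = LogManager.getLogger();

        void handleNotFound(String uri, Throwable cause) {
            // As in this change: concatenation guarded by an explicit level check.
            if (LOGGER.isWarnEnabled()) {
                LOGGER.warn("No servlet for " + uri);
            }
            // Alternative: the {} placeholder is substituted only if WARN is enabled.
            LOGGER.warn("No servlet for {}", uri);
            // A trailing Throwable is still logged together with its stack trace.
            LOGGER.error("Failure handling HTTP Request", cause);
        }
    }
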
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/StaticResourceServlet.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/StaticResourceServlet.java
index b21e533..e2b3237 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/StaticResourceServlet.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/StaticResourceServlet.java
@@ -22,19 +22,20 @@
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hyracks.http.api.IServletRequest;
 import org.apache.hyracks.http.api.IServletResponse;
 import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import io.netty.handler.codec.http.HttpResponseStatus;
 
 public class StaticResourceServlet extends AbstractServlet {
 
-    private static final Logger LOGGER = Logger.getLogger(StaticResourceServlet.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     public StaticResourceServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
         super(ctx, paths);
@@ -63,7 +64,7 @@
                 try {
                     IOUtils.copy(is, out);
                 } catch (IOException e) {
-                    LOGGER.log(Level.WARNING, "Failure copying response", e);
+                    LOGGER.log(Level.WARN, "Failure copying response", e);
                 } finally {
                     if (out != null) {
                         IOUtils.closeQuietly(out);
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/utils/HttpUtil.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/utils/HttpUtil.java
index 99e334c..ffa4d4b 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/utils/HttpUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/utils/HttpUtil.java
@@ -28,14 +28,15 @@
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import java.util.regex.Pattern;
 
 import org.apache.hyracks.http.api.IServletRequest;
 import org.apache.hyracks.http.api.IServletResponse;
 import org.apache.hyracks.http.server.BaseRequest;
 import org.apache.hyracks.http.server.FormUrlEncodedRequest;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import io.netty.buffer.PooledByteBufAllocator;
 import io.netty.handler.codec.http.FullHttpRequest;
@@ -45,7 +46,7 @@
 
 public class HttpUtil {
 
-    private static final Logger LOGGER = Logger.getLogger(HttpUtil.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private static final Pattern PARENT_DIR = Pattern.compile("/[^./]+/\\.\\./");
     private static long maxMemUsage = 0L;
 
@@ -207,7 +208,7 @@
             report.append('\n');
             report.append('\n');
         } catch (Throwable th) { // NOSONAR
-            LOGGER.log(Level.WARNING, "Failed to access PlatformDependent.DIRECT_MEMORY_COUNTER", th);
+            LOGGER.log(Level.WARN, "Failed to access PlatformDependent.DIRECT_MEMORY_COUNTER", th);
             return;
         }
         report.append("--------------- PooledByteBufAllocator.DEFAULT ----------------");
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/test/java/org/apache/hyracks/http/servlet/ChattyServlet.java b/hyracks-fullstack/hyracks/hyracks-http/src/test/java/org/apache/hyracks/http/servlet/ChattyServlet.java
index 5bd2e38..e6aedb9 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/test/java/org/apache/hyracks/http/servlet/ChattyServlet.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/test/java/org/apache/hyracks/http/servlet/ChattyServlet.java
@@ -19,18 +19,19 @@
 package org.apache.hyracks.http.servlet;
 
 import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.http.api.IServletRequest;
 import org.apache.hyracks.http.api.IServletResponse;
 import org.apache.hyracks.http.server.AbstractServlet;
 import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import io.netty.handler.codec.http.HttpResponseStatus;
 
 public class ChattyServlet extends AbstractServlet {
-    private static final Logger LOGGER = Logger.getLogger(ChattyServlet.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private byte[] bytes;
 
     public ChattyServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
@@ -53,7 +54,7 @@
     protected void get(IServletRequest request, IServletResponse response) throws Exception {
         response.setStatus(HttpResponseStatus.OK);
         HttpUtil.setContentType(response, HttpUtil.ContentType.TEXT_HTML, HttpUtil.Encoding.UTF8);
-        LOGGER.log(Level.WARNING, "I am about to flood you... and a single buffer is " + bytes.length + " bytes");
+        LOGGER.log(Level.WARN, "I am about to flood you... and a single buffer is " + bytes.length + " bytes");
         for (int i = 0; i < 100; i++) {
             response.outputStream().write(bytes);
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/pom.xml b/hyracks-fullstack/hyracks/hyracks-ipc/pom.xml
index b49e99e..5ef436f 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/pom.xml
@@ -49,5 +49,9 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
index 36cf2fd..7688974 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
@@ -39,13 +39,14 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.io.IOUtils;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class IPCConnectionManager {
-    private static final Logger LOGGER = Logger.getLogger(IPCConnectionManager.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     // TODO(mblow): the next two could be config parameters
     private static final int INITIAL_RETRY_DELAY_MILLIS = 100;
@@ -120,7 +121,7 @@
                 return handle;
             }
             if (maxRetries < 0 || retries++ < maxRetries) {
-                LOGGER.warning("Connection to " + remoteAddress + " failed; retrying" + (maxRetries <= 0 ? ""
+                LOGGER.warn("Connection to " + remoteAddress + " failed; retrying" + (maxRetries <= 0 ? ""
                         : " (retry attempt " + retries + " of " + maxRetries + ") after " + delay + "ms"));
                 Thread.sleep(delay);
                 delay = Math.min(MAX_RETRY_DELAY_MILLIS, (int) (delay * 1.5));
@@ -136,8 +137,8 @@
     }
 
     synchronized void write(Message msg) {
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine("Enqueued message: " + msg);
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug("Enqueued message: " + msg);
         }
         sendList.add(msg);
         networkThread.selector.wakeup();
@@ -209,8 +210,8 @@
             int failingLoops = 0;
             while (!stopped) {
                 try {
-                    if (LOGGER.isLoggable(Level.FINE)) {
-                        LOGGER.fine("Starting Select");
+                    if (LOGGER.isDebugEnabled()) {
+                        LOGGER.debug("Starting Select");
                     }
                     int n = selector.select();
                     collectOutstandingWork();
@@ -238,9 +239,7 @@
                         int len = workingSendList.size();
                         for (int i = 0; i < len; ++i) {
                             Message msg = workingSendList.get(i);
-                            if (LOGGER.isLoggable(Level.FINE)) {
-                                LOGGER.fine("Processing send of message: " + msg);
-                            }
+                            LOGGER.debug(() -> "Processing send of message: " + msg);
                             IPCHandle handle = msg.getIPCHandle();
                             if (handle.getState() != HandleState.CLOSED) {
                                 if (!handle.full()) {
@@ -340,7 +339,7 @@
                     failingLoops = 0;
                 } catch (Exception e) {
                     int sleepSecs = (int)Math.pow(2, Math.min(11, failingLoops++));
-                    LOGGER.log(Level.SEVERE, "Exception processing message; sleeping " + sleepSecs
+                    LOGGER.log(Level.ERROR, "Exception processing message; sleeping " + sleepSecs
                             + " seconds", e);
                     try {
                         Thread.sleep(TimeUnit.SECONDS.toMillis(sleepSecs));
@@ -364,10 +363,10 @@
             try {
                 connectFinished = channel.finishConnect();
                 if (!connectFinished) {
-                    LOGGER.log(Level.WARNING, "Channel connect did not finish");
+                    LOGGER.log(Level.WARN, "Channel connect did not finish");
                 }
             } catch (IOException e) {
-                LOGGER.log(Level.WARNING, "Exception finishing channel connect", e);
+                LOGGER.log(Level.WARN, "Exception finishing channel connect", e);
             }
             return connectFinished;
         }
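
IPCConnectionManager is one of the few places where this change drops the guard entirely and passes a lambda instead; Log4j2's Supplier overloads build the message only when the level is enabled. A sketch of the two equivalent forms (LazyMessageSketch, send() and the msg parameter are illustrative only):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class LazyMessageSketch {
        private static final Logger LOGGER = LogManager.getLogger();

        void send(Object msg) {
            // Guarded form, used at most call sites in this change.
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Enqueued message: " + msg);
            }
            // Supplier form: the lambda runs only if DEBUG is enabled, so the
            // explicit guard can be removed without paying for concatenation.
            LOGGER.debug(() -> "Processing send of message: " + msg);
        }
    }
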
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCSystem.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCSystem.java
index f7e0af1..8e38651 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCSystem.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCSystem.java
@@ -21,17 +21,18 @@
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.ipc.api.IIPCHandle;
 import org.apache.hyracks.ipc.api.IIPCI;
 import org.apache.hyracks.ipc.api.IPCPerformanceCounters;
 import org.apache.hyracks.ipc.api.IPayloadSerializerDeserializer;
 import org.apache.hyracks.ipc.exceptions.IPCException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class IPCSystem {
-    private static final Logger LOGGER = Logger.getLogger(IPCSystem.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final IPCConnectionManager cMgr;
 
diff --git a/hyracks-fullstack/hyracks/hyracks-net/pom.xml b/hyracks-fullstack/hyracks/hyracks-net/pom.xml
index 8da177e..66edba6 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-net/pom.xml
@@ -51,5 +51,9 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/AbstractChannelWriteInterface.java b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/AbstractChannelWriteInterface.java
index e50ffd2..0b548f6 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/AbstractChannelWriteInterface.java
+++ b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/AbstractChannelWriteInterface.java
@@ -21,17 +21,17 @@
 import java.nio.ByteBuffer;
 import java.util.ArrayDeque;
 import java.util.Queue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IBufferAcceptor;
 import org.apache.hyracks.api.comm.IChannelControlBlock;
 import org.apache.hyracks.api.comm.IChannelWriteInterface;
 import org.apache.hyracks.api.comm.ICloseableBufferAcceptor;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public abstract class AbstractChannelWriteInterface implements IChannelWriteInterface {
 
-    private static final Logger LOGGER = Logger.getLogger(AbstractChannelWriteInterface.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     protected final IChannelControlBlock ccb;
     protected final Queue<ByteBuffer> wiFullQueue;
     protected boolean channelWritabilityState;
@@ -129,8 +129,8 @@
         public void close() {
             synchronized (ccb) {
                 if (eos) {
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.warning("Received duplicate close() on channel: " + ccb.getChannelId());
+                    if (LOGGER.isWarnEnabled()) {
+                        LOGGER.warn("Received duplicate close() on channel: " + ccb.getChannelId());
                     }
                     return;
                 }
diff --git a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/ChannelSet.java b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/ChannelSet.java
index 49bb292..bd42560 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/ChannelSet.java
+++ b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/ChannelSet.java
@@ -20,15 +20,15 @@
 
 import java.util.Arrays;
 import java.util.BitSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IChannelInterfaceFactory;
 import org.apache.hyracks.api.comm.MuxDemuxCommand;
 import org.apache.hyracks.api.exceptions.NetException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ChannelSet {
-    private static final Logger LOGGER = Logger.getLogger(ChannelSet.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private static final int INITIAL_SIZE = 16;
 
@@ -82,8 +82,8 @@
             ChannelControlBlock ccb = ccbArray[i];
             if (ccb != null) {
                 if (ccb.completelyClosed()) {
-                    if (LOGGER.isLoggable(Level.FINE)) {
-                        LOGGER.fine("Cleaning free channel: " + ccb);
+                    if (LOGGER.isDebugEnabled()) {
+                        LOGGER.debug("Cleaning free channel: " + ccb);
                     }
                     freeChannel(ccb);
                 }
@@ -218,8 +218,8 @@
         if (ccbArray[idx] != null) {
             assert ccbArray[idx].completelyClosed() : ccbArray[idx].toString();
             if (ccbArray[idx].completelyClosed()) {
-                if (LOGGER.isLoggable(Level.FINE)) {
-                    LOGGER.fine("Cleaning free channel: " + ccbArray[idx]);
+                if (LOGGER.isDebugEnabled()) {
+                    LOGGER.debug("Cleaning free channel: " + ccbArray[idx]);
                 }
                 freeChannel(ccbArray[idx]);
             }
diff --git a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/FullFrameChannelWriteInterface.java b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/FullFrameChannelWriteInterface.java
index 418ebd7..17b70a8 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/FullFrameChannelWriteInterface.java
+++ b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/FullFrameChannelWriteInterface.java
@@ -18,18 +18,17 @@
  */
 package org.apache.hyracks.net.protocols.muxdemux;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.api.comm.IBufferFactory;
 import org.apache.hyracks.api.comm.IChannelControlBlock;
 import org.apache.hyracks.api.comm.IConnectionWriterState;
 import org.apache.hyracks.api.comm.MuxDemuxCommand;
 import org.apache.hyracks.api.exceptions.NetException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class FullFrameChannelWriteInterface extends AbstractChannelWriteInterface {
 
-    private static final Logger LOGGER = Logger.getLogger(FullFrameChannelWriteInterface.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     FullFrameChannelWriteInterface(IChannelControlBlock ccb) {
         super(ccb);
@@ -77,8 +76,8 @@
         }
         channelWritabilityState = true;
         if (eos) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Received duplicate close() on channel: " + channelId);
+            if (LOGGER.isWarnEnabled()) {
+                LOGGER.warn("Received duplicate close() on channel: " + channelId);
             }
             return;
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
index 8c54c65..81636de 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
+++ b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
@@ -23,8 +23,6 @@
 import java.nio.channels.SelectionKey;
 import java.nio.channels.SocketChannel;
 import java.util.BitSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IChannelControlBlock;
 import org.apache.hyracks.api.comm.IChannelInterfaceFactory;
@@ -33,6 +31,8 @@
 import org.apache.hyracks.api.exceptions.NetException;
 import org.apache.hyracks.net.protocols.tcp.ITCPConnectionEventListener;
 import org.apache.hyracks.net.protocols.tcp.TCPConnection;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 /**
  * A {@link MultiplexedConnection} can be used by clients to create multiple "channels"
@@ -41,7 +41,7 @@
  * @author vinayakb
  */
 public class MultiplexedConnection implements ITCPConnectionEventListener {
-    private static final Logger LOGGER = Logger.getLogger(MultiplexedConnection.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private static final int MAX_CHUNKS_READ_PER_CYCLE = 4;
 
@@ -366,8 +366,8 @@
                 }
                 readerState.readBuffer.flip();
                 readerState.command.read(readerState.readBuffer);
-                if (LOGGER.isLoggable(Level.FINE)) {
-                    LOGGER.fine("Received command: " + readerState.command);
+                if (LOGGER.isDebugEnabled()) {
+                    LOGGER.debug("Received command: " + readerState.command);
                 }
                 ChannelControlBlock ccb = null;
                 switch (readerState.command.getCommandType()) {
@@ -409,8 +409,8 @@
                         muxDemux.getChannelOpenListener().channelOpened(ccb);
                     }
                 }
-                if (LOGGER.isLoggable(Level.FINE)) {
-                    LOGGER.fine("Applied command: " + readerState.command + " on " + ccb);
+                if (LOGGER.isDebugEnabled()) {
+                    LOGGER.debug("Applied command: " + readerState.command + " on " + ccb);
                 }
             }
             if (readerState.pendingReadSize > 0) {
diff --git a/hyracks-fullstack/hyracks/hyracks-server/pom.xml b/hyracks-fullstack/hyracks/hyracks-server/pom.xml
index 8ad94e3..42a29fe 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-server/pom.xml
@@ -176,5 +176,9 @@
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-databind</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksServerProcess.java b/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksServerProcess.java
index c387ceb..0c7be89 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksServerProcess.java
+++ b/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksServerProcess.java
@@ -25,11 +25,12 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 abstract class HyracksServerProcess {
-    private static final Logger LOGGER = Logger.getLogger(HyracksServerProcess.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     protected Process process;
     protected File configFile = null;
@@ -39,14 +40,14 @@
 
     public void start() throws IOException {
         String[] cmd = buildCommand();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Starting command: " + Arrays.toString(cmd));
         }
 
         ProcessBuilder pb = new ProcessBuilder(cmd);
         pb.redirectErrorStream(true);
         if (logFile != null) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Logging to: " + logFile.getCanonicalPath());
             }
             logFile.getParentFile().mkdirs();
@@ -55,7 +56,7 @@
             }
             pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logFile));
         } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Logfile not set, subprocess will output to stdout");
             }
         }
@@ -67,8 +68,8 @@
         process.destroy();
         try {
             boolean success = process.waitFor(30, TimeUnit.SECONDS);
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Killing unresponsive NC Process");
+            if (LOGGER.isWarnEnabled()) {
+                LOGGER.warn("Killing unresponsive NC Process");
             }
             if (!success) {
                 process.destroyForcibly();
diff --git a/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java b/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
index c3bdb2a..d12c05b 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
+++ b/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
@@ -22,13 +22,11 @@
 import java.io.IOException;
 import java.net.InetAddress;
 import java.util.Iterator;
-import java.util.logging.Logger;
 
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import junit.framework.Assert;
-import org.apache.commons.lang3.StringUtils;
 import org.apache.http.HttpResponse;
 import org.apache.http.HttpStatus;
 import org.apache.http.client.HttpClient;
@@ -37,6 +35,8 @@
 import org.apache.http.util.EntityUtils;
 import org.apache.hyracks.server.process.HyracksVirtualCluster;
 import org.apache.hyracks.util.file.FileUtil;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -47,7 +47,7 @@
     private static final String LOG_DIR = FileUtil.joinPath(TARGET_DIR, "failsafe-reports");
     private static final String RESOURCE_DIR = FileUtil.joinPath(TARGET_DIR, "test-classes", "NCServiceIT");
     private static final String APP_HOME = FileUtil.joinPath(TARGET_DIR, "appassembler");
-    private static final Logger LOGGER = Logger.getLogger(NCServiceIT.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private static HyracksVirtualCluster cluster = null;
 
@@ -139,7 +139,7 @@
             setUp();
         } catch (Exception e) {
             e.printStackTrace();
-            LOGGER.severe("TEST CASE(S) FAILED");
+            LOGGER.error("TEST CASE(S) FAILED");
         } finally {
             tearDown();
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml
index 2b3f3cb..1d52034 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml
@@ -90,5 +90,9 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/build/IndexBuilder.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/build/IndexBuilder.java
index 9f648c5..f62860a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/build/IndexBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/build/IndexBuilder.java
@@ -19,8 +19,6 @@
 package org.apache.hyracks.storage.am.common.build;
 
 import java.io.IOException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.application.INCServiceContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -36,9 +34,12 @@
 import org.apache.hyracks.storage.common.IStorageManager;
 import org.apache.hyracks.storage.common.LocalResource;
 import org.apache.hyracks.storage.common.file.IResourceIdFactory;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class IndexBuilder implements IIndexBuilder {
-    private static final Logger LOGGER = Logger.getLogger(IndexBuilder.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     protected final INCServiceContext ctx;
     protected final IStorageManager storageManager;
@@ -85,7 +86,7 @@
                 //The reason for this is to handle many cases such as:
                 //1. Crash while delete index is running (we don't do global cleanup on restart)
                 //2. Node leaves and then join with old data
-                LOGGER.log(Level.WARNING,
+                LOGGER.log(Level.WARN,
                         "Removing existing index on index create for the index: " + resourceRef.getRelativePath());
                 lcManager.unregister(resourceRef.getRelativePath());
                 index.destroy();
@@ -95,7 +96,7 @@
                     // This is another big problem that we need to disallow soon
                     // We can only disallow this if we have a global cleanup after crash
                     // on reboot
-                    LOGGER.log(Level.WARNING,
+                    LOGGER.log(Level.WARN,
                             "Deleting " + resourceRef.getRelativePath()
                                     + " on index create. The index is not registered"
                                     + " but the file exists in the filesystem");
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
index abfe0bb..520dbc0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
@@ -19,9 +19,6 @@
 
 package org.apache.hyracks.storage.am.common.dataflow;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.api.application.INCServiceContext;
 import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -33,10 +30,13 @@
 import org.apache.hyracks.storage.common.IResourceLifecycleManager;
 import org.apache.hyracks.storage.common.IStorageManager;
 import org.apache.hyracks.storage.common.LocalResource;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class IndexDataflowHelper implements IIndexDataflowHelper {
 
-    private static final Logger LOGGER = Logger.getLogger(IndexDataflowHelper.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final INCServiceContext ctx;
     private final IResourceLifecycleManager<IIndex> lcManager;
     private final ILocalResourceRepository localResourceRepository;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
index ea1635a..c59c1a8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
@@ -27,8 +27,6 @@
 
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -36,10 +34,12 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
 import org.apache.hyracks.storage.am.common.api.IIndexDataflowHelper;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class IndexDropOperatorNodePushable extends AbstractOperatorNodePushable {
 
-    private static final Logger LOGGER = Logger.getLogger(IndexDropOperatorNodePushable.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private static final long DROP_ATTEMPT_WAIT_TIME_MILLIS = TimeUnit.SECONDS.toMillis(1);
     private final IIndexDataflowHelper indexHelper;
     private final Set<DropOption> options;
@@ -83,11 +83,11 @@
                 return;
             } catch (HyracksDataException e) {
                 if (isIgnorable(e)) {
-                    LOGGER.log(Level.INFO, e, () -> "Ignoring exception on drop");
+                    LOGGER.info("Ignoring exception on drop", e);
                     return;
                 }
                 if (canRetry(e)) {
-                    LOGGER.log(Level.INFO, e, () -> "Retrying drop on exception");
+                    LOGGER.info("Retrying drop on exception", e);
                     continue;
                 }
                 throw e;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
index 132149b..1e5f69b 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
@@ -21,8 +21,6 @@
 import java.io.DataOutput;
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.VSizeFrame;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -50,10 +48,13 @@
 import org.apache.hyracks.storage.common.IIndexCursor;
 import org.apache.hyracks.storage.common.ISearchOperationCallback;
 import org.apache.hyracks.storage.common.ISearchPredicate;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public abstract class IndexSearchOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
 
-    static final Logger LOGGER = Logger.getLogger(IndexSearchOperatorNodePushable.class.getName());
+    static final Logger LOGGER = LogManager.getLogger();
     protected final IHyracksTaskContext ctx;
     protected final IIndexDataflowHelper indexHelper;
     protected FrameTupleAccessor accessor;
@@ -299,7 +300,7 @@
             try {
                 nonMatchWriter.writeMissing(out);
             } catch (Exception e) {
-                LOGGER.log(Level.WARNING, e.getMessage(), e);
+                LOGGER.log(Level.WARN, e.getMessage(), e);
             }
             nullTuple.addFieldEndOffset();
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml
index 84a2e95..7ef4a98 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml
@@ -86,5 +86,9 @@
       <artifactId>hyracks-util</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMDiskComponent.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMDiskComponent.java
index bb27236..26c7b0d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMDiskComponent.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMDiskComponent.java
@@ -18,9 +18,6 @@
  */
 package org.apache.hyracks.storage.am.lsm.common.impls;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManager;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilter;
@@ -30,10 +27,13 @@
 import org.apache.hyracks.storage.am.lsm.common.util.ComponentUtils;
 import org.apache.hyracks.storage.am.lsm.common.util.LSMComponentIdUtils;
 import org.apache.hyracks.storage.common.MultiComparator;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public abstract class AbstractLSMDiskComponent extends AbstractLSMComponent implements ILSMDiskComponent {
 
-    private static final Logger LOGGER = Logger.getLogger(AbstractLSMDiskComponent.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final DiskComponentMetadata metadata;
 
@@ -129,7 +129,7 @@
             // However, we cannot throw an exception here to be compatible with legacy datasets.
             // In this case, the disk component would always get a garbage Id [-1, -1], which makes the
             // component Id-based optimization useless but still correct.
-            LOGGER.warning("Component Id not found from disk component metadata");
+            LOGGER.warn("Component Id not found from disk component metadata");
         }
         return componentId;
     }
@@ -144,7 +144,7 @@
     @Override
     public void markAsValid(boolean persist) throws HyracksDataException {
         ComponentUtils.markAsValid(getMetadataHolder(), persist);
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.log(Level.INFO, "Marked as valid component with id: " + getId());
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
index dc808ad..6115ba6 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
@@ -27,8 +27,6 @@
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.exceptions.ErrorCode;
@@ -71,9 +69,12 @@
 import org.apache.hyracks.storage.common.ISearchOperationCallback;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
 import org.apache.hyracks.util.trace.ITracer;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public abstract class AbstractLSMIndex implements ILSMIndex {
-    private static final Logger LOGGER = Logger.getLogger(AbstractLSMIndex.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     protected final ILSMHarness lsmHarness;
     protected final IIOManager ioManager;
     protected final ILSMIOOperationScheduler ioScheduler;
@@ -694,7 +695,7 @@
         if (opCtx.getOperation() == IndexOperation.DELETE_MEMORY_COMPONENT) {
             return EmptyComponent.INSTANCE;
         } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 FlushOperation flushOp = (FlushOperation) operation;
                 LOGGER.log(Level.INFO, "Flushing component with id: " + flushOp.getFlushingComponent().getId());
             }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMMemoryComponent.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMMemoryComponent.java
index 15cf8e5..c0bef7d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMMemoryComponent.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMMemoryComponent.java
@@ -19,8 +19,6 @@
 package org.apache.hyracks.storage.am.lsm.common.impls;
 
 import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilter;
@@ -31,10 +29,13 @@
 import org.apache.hyracks.storage.am.lsm.common.api.LSMOperationType;
 import org.apache.hyracks.storage.am.lsm.common.util.LSMComponentIdUtils;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public abstract class AbstractLSMMemoryComponent extends AbstractLSMComponent implements ILSMMemoryComponent {
 
-    private static final Logger LOGGER = Logger.getLogger(AbstractLSMMemoryComponent.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final IVirtualBufferCache vbc;
     private final AtomicBoolean isModified;
     private int writerCount;
@@ -280,7 +281,7 @@
             throw new IllegalStateException(
                     this + " receives illegal id. Old id " + this.componentId + ", new id " + componentId);
         }
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.log(Level.INFO, "Component Id was reset from " + this.componentId + " to " + componentId);
         }
         this.componentId = componentId;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ComponentReplacementContext.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ComponentReplacementContext.java
index 98c1560..ee7afa0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ComponentReplacementContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ComponentReplacementContext.java
@@ -21,8 +21,6 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
@@ -37,9 +35,12 @@
 import org.apache.hyracks.storage.common.ISearchOperationCallback;
 import org.apache.hyracks.storage.common.ISearchPredicate;
 import org.apache.hyracks.storage.common.MultiComparator;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ComponentReplacementContext implements ILSMIndexOperationContext {
-    private static final Logger LOGGER = Logger.getLogger(ComponentReplacementContext.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private final List<ILSMComponent> components;
     private final List<ILSMComponent> diskComponents;
     private final List<ILSMComponentId> replacedComponentIds;
@@ -166,7 +167,7 @@
             }
             if (!found) {
                 // component has been merged?
-                LOGGER.log(Level.WARNING, "Memory Component with id = " + replacedComponentIds.get(i)
+                LOGGER.log(Level.WARN, "Memory Component with id = " + replacedComponentIds.get(i)
                         + " was flushed and merged before search cursor replaces it");
                 return false;
             }
@@ -198,7 +199,7 @@
                 ctx.getComponentHolder().add(swapIndexes[i], diskComponents.get(i));
             }
         } catch (Exception e) {
-            LOGGER.log(Level.WARNING, "Failure replacing memory components with disk components", e);
+            LOGGER.log(Level.WARN, "Failure replacing memory components with disk components", e);
             throw e;
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
index 20f9f6a..d9d3a07 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
@@ -19,8 +19,6 @@
 package org.apache.hyracks.storage.am.lsm.common.impls;
 
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -41,9 +39,11 @@
 import org.apache.hyracks.storage.common.IIndexCursor;
 import org.apache.hyracks.storage.common.ISearchPredicate;
 import org.apache.hyracks.util.trace.ITracer;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ExternalIndexHarness extends LSMHarness {
-    private static final Logger LOGGER = Logger.getLogger(ExternalIndexHarness.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     public ExternalIndexHarness(ILSMIndex lsmIndex, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
             boolean replicationEnabled) {
@@ -230,7 +230,7 @@
 
     @Override
     public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Started a merge operation for index: " + lsmIndex + " ...");
         }
 
@@ -243,7 +243,7 @@
             exitComponents(ctx, LSMOperationType.MERGE, newComponent, false);
             operation.getCallback().afterFinalize(LSMIOOperationType.MERGE, newComponent);
         }
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Finished the merge operation for index: " + lsmIndex);
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
index 5368591..15ed0ff 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
@@ -24,8 +24,6 @@
 import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.function.Predicate;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -58,9 +56,12 @@
 import org.apache.hyracks.storage.common.ISearchPredicate;
 import org.apache.hyracks.util.trace.ITracer;
 import org.apache.hyracks.util.trace.ITracer.Scope;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class LSMHarness implements ILSMHarness {
-    private static final Logger LOGGER = Logger.getLogger(LSMHarness.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     protected final ILSMIndex lsmIndex;
     protected final ComponentReplacementContext componentReplacementCtx;
@@ -195,8 +196,8 @@
             }
             entranceSuccessful = numEntered == components.size();
         } catch (Throwable e) { // NOSONAR: Log and re-throw
-            if (LOGGER.isLoggable(Level.SEVERE)) {
-                LOGGER.log(Level.SEVERE, opType.name() + " failed to enter components on " + lsmIndex, e);
+            if (LOGGER.isErrorEnabled()) {
+                LOGGER.log(Level.ERROR, opType.name() + " failed to enter components on " + lsmIndex, e);
             }
             throw e;
         } finally {
@@ -270,8 +271,8 @@
                     ctx.setAccessingComponents(false);
                     exitOperation(ctx, opType, newComponent, failedOperation);
                 } catch (Throwable e) { // NOSONAR: Log and re-throw
-                    if (LOGGER.isLoggable(Level.SEVERE)) {
-                        LOGGER.log(Level.SEVERE, e.getMessage(), e);
+                    if (LOGGER.isErrorEnabled()) {
+                        LOGGER.log(Level.ERROR, e.getMessage(), e);
                     }
                     throw e;
                 } finally {
@@ -324,8 +325,8 @@
                         c.deactivateAndDestroy();
                     }
                 } catch (Throwable e) { // NOSONAR Log and re-throw
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.log(Level.WARNING, "Failure scheduling replication or destroying merged component", e);
+                    if (LOGGER.isWarnEnabled()) {
+                        LOGGER.log(Level.WARN, "Failure scheduling replication or destroying merged component", e);
                     }
                     throw e; // NOSONAR: The last call in the finally clause
                 }
@@ -557,7 +558,7 @@
 
     @Override
     public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Started a flush operation for index: " + lsmIndex + " ...");
         }
         try {
@@ -569,8 +570,8 @@
                 newComponent.markAsValid(lsmIndex.isDurable());
             } catch (Throwable e) { // NOSONAR Log and re-throw
                 failedOperation = true;
-                if (LOGGER.isLoggable(Level.SEVERE)) {
-                    LOGGER.log(Level.SEVERE, "Flush failed on " + lsmIndex, e);
+                if (LOGGER.isErrorEnabled()) {
+                    LOGGER.log(Level.ERROR, "Flush failed on " + lsmIndex, e);
                 }
                 throw e;
             } finally {
@@ -586,7 +587,7 @@
             opTracker.completeOperation(lsmIndex, LSMOperationType.FLUSH, ctx.getSearchOperationCallback(),
                     ctx.getModificationCallback());
         }
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Finished the flush operation for index: " + lsmIndex);
         }
     }
@@ -618,7 +619,7 @@
 
     @Override
     public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Started a merge operation for index: " + lsmIndex + " ...");
         }
         try {
@@ -631,8 +632,8 @@
                 newComponent.markAsValid(lsmIndex.isDurable());
             } catch (Throwable e) { // NOSONAR: Log and re-throw
                 failedOperation = true;
-                if (LOGGER.isLoggable(Level.SEVERE)) {
-                    LOGGER.log(Level.SEVERE, "Failed merge operation on " + lsmIndex, e);
+                if (LOGGER.isErrorEnabled()) {
+                    LOGGER.log(Level.ERROR, "Failed merge operation on " + lsmIndex, e);
                 }
                 throw e;
             } finally {
@@ -658,7 +659,7 @@
             opTracker.completeOperation(lsmIndex, LSMOperationType.MERGE, ctx.getSearchOperationCallback(),
                     ctx.getModificationCallback());
         }
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Finished the merge operation for index: " + lsmIndex);
         }
     }
@@ -751,8 +752,8 @@
                 processor.finish();
             }
         } catch (HyracksDataException e) {
-            if (LOGGER.isLoggable(Level.SEVERE)) {
-                LOGGER.log(Level.SEVERE, "Failed to process frame", e);
+            if (LOGGER.isErrorEnabled()) {
+                LOGGER.log(Level.ERROR, "Failed to process frame", e);
             }
             throw e;
         } finally {
@@ -806,8 +807,8 @@
                     opTracker.wait();
                 } catch (InterruptedException e) {
                     Thread.currentThread().interrupt();
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.log(Level.WARNING, "Ignoring interrupt while waiting for lagging merge on " + lsmIndex,
+                    if (LOGGER.isWarnEnabled()) {
+                        LOGGER.log(Level.WARN, "Ignoring interrupt while waiting for lagging merge on " + lsmIndex,
                                 e);
                     }
                 }
@@ -891,7 +892,7 @@
             try {
                 opTracker.wait(); // NOSONAR: OpTracker is always synchronized here
             } catch (InterruptedException e) {
-                LOGGER.log(Level.WARNING, "Interrupted while attempting component level delete", e);
+                LOGGER.log(Level.WARN, "Interrupted while attempting component level delete", e);
                 Thread.currentThread().interrupt();
                 throw HyracksDataException.create(e);
             }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/MemoryComponentMetadata.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/MemoryComponentMetadata.java
index 1b827b7..3179790 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/MemoryComponentMetadata.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/MemoryComponentMetadata.java
@@ -20,8 +20,6 @@
 
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -31,9 +29,12 @@
 import org.apache.hyracks.storage.am.common.api.IMetadataPageManager;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrame;
 import org.apache.hyracks.storage.am.lsm.common.api.IComponentMetadata;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class MemoryComponentMetadata implements IComponentMetadata {
-    private static final Logger LOGGER = Logger.getLogger(MemoryComponentMetadata.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private static final byte[] empty = new byte[0];
     private final List<org.apache.commons.lang3.tuple.Pair<IValueReference, ArrayBackedValueStorage>> store =
             new ArrayList<>();
@@ -77,7 +78,7 @@
         LOGGER.log(Level.INFO, "Copying Metadata into a different component");
         ITreeIndexMetadataFrame frame = mdpManager.createMetadataFrame();
         for (Pair<IValueReference, ArrayBackedValueStorage> pair : store) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.log(Level.INFO, "Copying " + pair.getKey() + " : " + pair.getValue().getLength() + " bytes");
             }
             mdpManager.put(frame, pair.getKey(), pair.getValue());
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/SynchronousScheduler.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/SynchronousScheduler.java
index da3e986..506dcea 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/SynchronousScheduler.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/SynchronousScheduler.java
@@ -18,15 +18,15 @@
  */
 package org.apache.hyracks.storage.am.lsm.common.impls;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class SynchronousScheduler implements ILSMIOOperationScheduler {
-    private static final Logger LOGGER = Logger.getLogger(SynchronousScheduler.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     public static final SynchronousScheduler INSTANCE = new SynchronousScheduler();
 
     private SynchronousScheduler() {
@@ -37,7 +37,7 @@
         try {
             operation.call();
         } catch (Exception e) {
-            LOGGER.log(Level.SEVERE, "IO Operation failed", e);
+            LOGGER.log(Level.ERROR, "IO Operation failed", e);
             throw HyracksDataException.create(e);
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/TracedIOOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/TracedIOOperation.java
index 4cfc3b6..5fa6b4f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/TracedIOOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/TracedIOOperation.java
@@ -19,8 +19,6 @@
 
 package org.apache.hyracks.storage.am.lsm.common.impls;
 
-import java.util.logging.Logger;
-
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IODeviceHandle;
@@ -29,10 +27,12 @@
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
 import org.apache.hyracks.util.trace.ITracer;
 import org.apache.hyracks.util.trace.ITracer.Scope;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 class TracedIOOperation implements ILSMIOOperation {
 
-    static final Logger LOGGER = Logger.getLogger(TracedIOOperation.class.getName());
+    static final Logger LOGGER = LogManager.getLogger();
 
     protected final ILSMIOOperation ioOp;
     private final LSMIOOperationType ioOpType;
@@ -128,7 +128,7 @@
         if (myIoOp instanceof Comparable && other instanceof ComparableTracedIOOperation) {
             return ((Comparable) myIoOp).compareTo(((ComparableTracedIOOperation) other).getIoOp());
         }
-        LOGGER.warning("Comparing ioOps of type " + myIoOp.getClass().getSimpleName() + " and "
+        LOGGER.warn("Comparing ioOps of type " + myIoOp.getClass().getSimpleName() + " and "
                 + other.getClass().getSimpleName() + " in " + getClass().getSimpleName());
         return Integer.signum(hashCode() - other.hashCode());
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/VirtualBufferCache.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/VirtualBufferCache.java
index 3a22793..d192351 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/VirtualBufferCache.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/VirtualBufferCache.java
@@ -25,8 +25,6 @@
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.locks.ReentrantLock;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -42,9 +40,12 @@
 import org.apache.hyracks.storage.common.file.FileMapManager;
 import org.apache.hyracks.storage.common.file.IFileMapManager;
 import org.apache.hyracks.util.JSONUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class VirtualBufferCache implements IVirtualBufferCache {
-    private static final Logger LOGGER = Logger.getLogger(VirtualBufferCache.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private final ICacheMemoryAllocator allocator;
     private final IFileMapManager fileMapManager;
@@ -125,7 +126,7 @@
     }
 
     private void logStats() {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.log(Level.INFO, "Free (allocated) pages = " + freePages.size() + ". Budget = " + pageBudget
                     + ". Large pages = " + largePages.get() + ". Overall usage = " + used.get());
         }
@@ -187,7 +188,7 @@
                 bucket.bucketLock.unlock();
             }
         }
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.log(Level.INFO, "Reclaimed pages = " + reclaimedPages);
         }
         logStats();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/ComponentUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/ComponentUtils.java
index 0097a37..94a3702 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/ComponentUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/ComponentUtils.java
@@ -19,8 +19,6 @@
 package org.apache.hyracks.storage.am.lsm.common.util;
 
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IPointable;
@@ -34,10 +32,13 @@
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMMemoryComponent;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ComponentUtils {
 
-    private static final Logger LOGGER = Logger.getLogger(ComponentUtils.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     public static final MutableArrayValueReference MARKER_LSN_KEY = new MutableArrayValueReference("Marker".getBytes());
     public static final long NOT_FOUND = -1L;
 
@@ -76,40 +77,40 @@
      * @throws HyracksDataException
      */
     public static void get(ILSMIndex index, IValueReference key, IPointable pointable) throws HyracksDataException {
-        boolean loggable = LOGGER.isLoggable(Level.FINE);
+        boolean loggable = LOGGER.isDebugEnabled();
         if (loggable) {
-            LOGGER.log(Level.FINE, "Getting " + key + " from index " + index);
+            LOGGER.log(Level.DEBUG, "Getting " + key + " from index " + index);
         }
         // Lock the opTracker to ensure index components don't change
         synchronized (index.getOperationTracker()) {
             index.getCurrentMemoryComponent().getMetadata().get(key, pointable);
             if (pointable.getLength() == 0) {
                 if (loggable) {
-                    LOGGER.log(Level.FINE, key + " was not found in mutable memory component of " + index);
+                    LOGGER.log(Level.DEBUG, key + " was not found in mutable memory component of " + index);
                 }
                 // was not found in the in current mutable component, search in the other in memory components
                 fromImmutableMemoryComponents(index, key, pointable);
                 if (pointable.getLength() == 0) {
                     if (loggable) {
-                        LOGGER.log(Level.FINE, key + " was not found in all immmutable memory components of " + index);
+                        LOGGER.log(Level.DEBUG, key + " was not found in all immmutable memory components of " + index);
                     }
                     // was not found in the in all in memory components, search in the disk components
                     fromDiskComponents(index, key, pointable);
                     if (loggable) {
                         if (pointable.getLength() == 0) {
-                            LOGGER.log(Level.FINE, key + " was not found in all disk components of " + index);
+                            LOGGER.log(Level.DEBUG, key + " was not found in all disk components of " + index);
                         } else {
-                            LOGGER.log(Level.FINE, key + " was found in disk components of " + index);
+                            LOGGER.log(Level.DEBUG, key + " was found in disk components of " + index);
                         }
                     }
                 } else {
                     if (loggable) {
-                        LOGGER.log(Level.FINE, key + " was found in the immutable memory components of " + index);
+                        LOGGER.log(Level.DEBUG, key + " was found in the immutable memory components of " + index);
                     }
                 }
             } else {
                 if (loggable) {
-                    LOGGER.log(Level.FINE, key + " was found in mutable memory component of " + index);
+                    LOGGER.log(Level.DEBUG, key + " was found in mutable memory component of " + index);
                 }
             }
         }
@@ -135,13 +136,13 @@
 
     private static void fromDiskComponents(ILSMIndex index, IValueReference key, IPointable pointable)
             throws HyracksDataException {
-        boolean loggable = LOGGER.isLoggable(Level.FINE);
+        boolean loggable = LOGGER.isDebugEnabled();
         if (loggable) {
-            LOGGER.log(Level.FINE, "Getting " + key + " from disk components of " + index);
+            LOGGER.log(Level.DEBUG, "Getting " + key + " from disk components of " + index);
         }
         for (ILSMDiskComponent c : index.getDiskComponents()) {
             if (loggable) {
-                LOGGER.log(Level.FINE, "Getting " + key + " from disk components " + c);
+                LOGGER.log(Level.DEBUG, "Getting " + key + " from disk components " + c);
             }
             c.getMetadata().get(key, pointable);
             if (pointable.getLength() != 0) {
@@ -152,19 +153,20 @@
     }
 
     private static void fromImmutableMemoryComponents(ILSMIndex index, IValueReference key, IPointable pointable) {
-        boolean loggable = LOGGER.isLoggable(Level.FINE);
+        boolean loggable = LOGGER.isDebugEnabled();
         if (loggable) {
-            LOGGER.log(Level.FINE, "Getting " + key + " from immutable memory components of " + index);
+            LOGGER.log(Level.DEBUG, "Getting " + key + " from immutable memory components of " + index);
         }
         List<ILSMMemoryComponent> memComponents = index.getMemoryComponents();
         int numOtherMemComponents = memComponents.size() - 1;
         int next = index.getCurrentMemoryComponentIndex();
         if (loggable) {
-            LOGGER.log(Level.FINE, index + " has " + numOtherMemComponents + " immutable memory components");
+            LOGGER.log(Level.DEBUG, index + " has " + numOtherMemComponents + " immutable memory components");
         }
         for (int i = 0; i < numOtherMemComponents; i++) {
             if (loggable) {
-                LOGGER.log(Level.FINE, "trying to get " + key + " from immutable memory components number: " + (i + 1));
+                LOGGER.log(Level.DEBUG,
+                        "trying to get " + key + " from immutable memory components number: " + (i + 1));
             }
             next = next - 1;
             if (next < 0) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/IOOperationUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/IOOperationUtils.java
index 0aeb0b9..9bd873d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/IOOperationUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/IOOperationUtils.java
@@ -18,14 +18,14 @@
  */
 package org.apache.hyracks.storage.am.lsm.common.util;
 
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.lsm.common.impls.BlockingIOOperationCallbackWrapper;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class IOOperationUtils {
-    private static final Logger LOGGER = Logger.getLogger(IOOperationUtils.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private IOOperationUtils() {
     }
@@ -35,7 +35,7 @@
         try {
             ioCallback.waitForIO();
         } catch (InterruptedException e) {
-            LOGGER.log(Level.WARNING, "Operation has been interrupted. returning");
+            LOGGER.log(Level.WARN, "Operation has been interrupted. returning");
             Thread.currentThread().interrupt();
             throw HyracksDataException.create(e);
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
index acd2e49..9998e97 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
@@ -88,5 +88,9 @@
       <artifactId>hyracks-storage-am-bloomfilter</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
index 030e4fd..01f0f1a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
@@ -20,8 +20,6 @@
 
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
@@ -77,9 +75,12 @@
 import org.apache.hyracks.storage.common.MultiComparator;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
 import org.apache.hyracks.util.trace.ITracer;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class LSMInvertedIndex extends AbstractLSMIndex implements IInvertedIndex {
-    private static final Logger LOGGER = Logger.getLogger(LSMInvertedIndex.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     protected final IBinaryTokenizerFactory tokenizerFactory;
 
@@ -178,7 +179,7 @@
                 } catch (HyracksDataException e) {
                     if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                         // Key has already been deleted.
-                        LOGGER.log(Level.WARNING, "Failure during index delete operation", e);
+                        LOGGER.log(Level.WARN, "Failure during index delete operation", e);
                         throw e;
                     }
                 }
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
index a699075..2a40e4c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
@@ -51,5 +51,9 @@
       <artifactId>hyracks-dataflow-common</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
index 7167565..6212896 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
@@ -36,8 +36,6 @@
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -49,10 +47,13 @@
 import org.apache.hyracks.api.util.IoUtil;
 import org.apache.hyracks.storage.common.file.BufferedFileHandle;
 import org.apache.hyracks.storage.common.file.IFileMapManager;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class BufferCache implements IBufferCacheInternal, ILifeCycleComponent {
 
-    private static final Logger LOGGER = Logger.getLogger(BufferCache.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private static final int MAP_FACTOR = 3;
 
     private static final int MIN_CLEANED_COUNT_DIFF = 3;
@@ -76,7 +77,7 @@
     private final Queue<BufferCacheHeaderHelper> headerPageCache = new ConcurrentLinkedQueue<>();
 
     //DEBUG
-    private Level fileOpsLevel = Level.FINE;
+    private Level fileOpsLevel = Level.DEBUG;
     private ArrayList<CachedPage> confiscatedPages;
     private Lock confiscateLock;
     private HashMap<CachedPage, StackTraceElement[]> confiscatedPagesOwner;
@@ -517,7 +518,7 @@
                      */
                     try {
                         Thread.sleep(PERIOD_BETWEEN_READ_ATTEMPTS);
-                        LOGGER.log(Level.WARNING, String.format("Failed to read page. Retrying attempt (%d/%d)", i + 1,
+                        LOGGER.log(Level.WARN, String.format("Failed to read page. Retrying attempt (%d/%d)", i + 1,
                                 MAX_PAGE_READ_ATTEMPTS), readException);
                     } catch (InterruptedException e) {
                         Thread.currentThread().interrupt();
@@ -705,7 +706,7 @@
             try {
                 write(cPage);
             } catch (HyracksDataException e) {
-                LOGGER.log(Level.WARNING, "Unable to write dirty page", e);
+                LOGGER.log(Level.WARN, "Unable to write dirty page", e);
                 cleaned = false;
             }
             if (cleaned) {
@@ -782,8 +783,8 @@
                         ioManager.close(value.getFileHandle());
                     }
                 } catch (HyracksDataException e) {
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.log(Level.WARNING, "Error flushing file id: " + key, e);
+                    if (LOGGER.isWarnEnabled()) {
+                        LOGGER.log(Level.WARN, "Error flushing file id: " + key, e);
                     }
                 }
             });
@@ -793,7 +794,7 @@
 
     @Override
     public int createFile(FileReference fileRef) throws HyracksDataException {
-        if (LOGGER.isLoggable(fileOpsLevel)) {
+        if (LOGGER.isEnabled(fileOpsLevel)) {
             LOGGER.log(fileOpsLevel, "Creating file: " + fileRef + " in cache: " + this);
         }
         IoUtil.create(fileRef);
@@ -814,7 +815,7 @@
 
     @Override
     public int openFile(FileReference fileRef) throws HyracksDataException {
-        if (LOGGER.isLoggable(fileOpsLevel)) {
+        if (LOGGER.isEnabled(fileOpsLevel)) {
             LOGGER.log(fileOpsLevel, "Opening file: " + fileRef + " in cache: " + this);
         }
         int fileId;
@@ -831,7 +832,7 @@
 
     @Override
     public void openFile(int fileId) throws HyracksDataException {
-        if (LOGGER.isLoggable(fileOpsLevel)) {
+        if (LOGGER.isEnabled(fileOpsLevel)) {
             LOGGER.log(fileOpsLevel, "Opening file: " + fileId + " in cache: " + this);
         }
         synchronized (fileInfoMap) {
@@ -929,11 +930,11 @@
 
     @Override
     public void closeFile(int fileId) throws HyracksDataException {
-        if (LOGGER.isLoggable(fileOpsLevel)) {
+        if (LOGGER.isEnabled(fileOpsLevel)) {
             LOGGER.log(fileOpsLevel, "Closing file: " + fileId + " in cache: " + this);
         }
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine(dumpState());
+        if (LOGGER.isDebugEnabled()) {
+            LOGGER.debug(dumpState());
         }
 
         synchronized (fileInfoMap) {
@@ -945,7 +946,7 @@
                 throw new HyracksDataException("Closed fileId: " + fileId + " more times than it was opened.");
             }
         }
-        if (LOGGER.isLoggable(fileOpsLevel)) {
+        if (LOGGER.isEnabled(fileOpsLevel)) {
             LOGGER.log(fileOpsLevel, "Closed file: " + fileId + " in cache: " + this);
         }
     }
@@ -980,7 +981,7 @@
 
     @Override
     public void deleteFile(int fileId) throws HyracksDataException {
-        if (LOGGER.isLoggable(fileOpsLevel)) {
+        if (LOGGER.isEnabled(fileOpsLevel)) {
             LOGGER.log(fileOpsLevel, "Deleting file: " + fileId + " in cache: " + this);
         }
         synchronized (fileInfoMap) {
@@ -1306,8 +1307,8 @@
                 }
             }
         } finally {
-            if (cycleCount > PIN_ATTEMPT_CYCLES_WARNING_THRESHOLD && LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Took " + cycleCount + " cycles to find free page in buffer cache.  (buffer cache "
+            if (cycleCount > PIN_ATTEMPT_CYCLES_WARNING_THRESHOLD && LOGGER.isWarnEnabled()) {
+                LOGGER.warn("Took " + cycleCount + " cycles to find free page in buffer cache.  (buffer cache "
                         + "undersized?)" + (DEBUG
                                 ? " ; " + (masterPinCount.get() - startingPinCount)
                                         + " successful pins since start of cycle"
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
index 872ac35..a6a3bc8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
@@ -22,13 +22,13 @@
 import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class ClockPageReplacementStrategy implements IPageReplacementStrategy {
-    private static final Logger LOGGER = Logger.getLogger(ClockPageReplacementStrategy.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private static final int MAX_UNSUCCESSFUL_CYCLE_COUNT = 3;
 
     private IBufferCacheInternal bufferCache;
@@ -130,8 +130,8 @@
             }
             if (looped && clockPtr >= startClockPtr) {
                 cycleCount++;
-                if (LOGGER.isLoggable(Level.FINE)) {
-                    LOGGER.fine("completed " + cycleCount + "/" + MAX_UNSUCCESSFUL_CYCLE_COUNT
+                if (LOGGER.isDebugEnabled()) {
+                    LOGGER.debug("completed " + cycleCount + "/" + MAX_UNSUCCESSFUL_CYCLE_COUNT
                             + " clock cycle(s) without finding victim");
                 }
                 if (cycleCount >= MAX_UNSUCCESSFUL_CYCLE_COUNT) {
@@ -219,8 +219,8 @@
                 }
             } else {
                 // we don't have the budget to resize- proceed anyway, but log
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Exceeding buffer cache budget of " + maxAllowedNumPages + " by "
+                if (LOGGER.isWarnEnabled()) {
+                    LOGGER.warn("Exceeding buffer cache budget of " + maxAllowedNumPages + " by "
                             + (numPages.get() + delta - maxAllowedNumPages)
                             + " pages in order to satisfy large page read");
                 }
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml b/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
index 104e943..6559308 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
@@ -104,5 +104,9 @@
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-lang3</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
index fbd3950..2a7b978 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
@@ -22,8 +22,6 @@
 import static org.junit.Assert.fail;
 
 import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -51,11 +49,13 @@
 import org.apache.hyracks.storage.common.IIndexBulkLoader;
 import org.apache.hyracks.storage.common.IIndexCursor;
 import org.apache.hyracks.storage.common.MultiComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.Test;
 
 @SuppressWarnings("rawtypes")
 public abstract class OrderedIndexExamplesTest {
-    protected static final Logger LOGGER = Logger.getLogger(OrderedIndexExamplesTest.class.getName());
+    protected static final Logger LOGGER = LogManager.getLogger();
     protected final Random rnd = new Random(50);
 
     protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
@@ -69,7 +69,7 @@
      */
     @Test
     public void fixedLengthKeyValueExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Fixed-Length Key,Value Example.");
         }
 
@@ -96,7 +96,7 @@
         treeIndex.activate();
 
         long start = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Inserting into tree...");
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -109,7 +109,7 @@
             int f0 = rnd.nextInt() % numInserts;
             int f1 = 5;
             TupleUtils.createIntegerTuple(tb, tuple, f0, f1);
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if (i % 1000 == 0) {
                     LOGGER.info("Inserting " + i + " : " + f0 + " " + f1);
                 }
@@ -123,7 +123,7 @@
             }
         }
         long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
         }
 
@@ -157,7 +157,7 @@
      */
     @Test
     public void pageSplitTestExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("BTree page split test.");
         }
 
@@ -232,7 +232,7 @@
      */
     @Test
     public void twoFixedLengthKeysOneFixedLengthValueExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Composite Key Test");
         }
 
@@ -262,7 +262,7 @@
         treeIndex.activate();
 
         long start = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Inserting into tree...");
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -276,7 +276,7 @@
             int f1 = rnd.nextInt() % 1000;
             int f2 = 5;
             TupleUtils.createIntegerTuple(tb, tuple, f0, f1, f2);
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if (i % 1000 == 0) {
                     LOGGER.info("Inserting " + i + " : " + f0 + " " + f1 + " " + f2);
                 }
@@ -290,7 +290,7 @@
             }
         }
         long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
         }
 
@@ -322,7 +322,7 @@
      */
     @Test
     public void varLenKeyValueExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Variable-Length Key,Value Example");
         }
 
@@ -349,7 +349,7 @@
         treeIndex.activate();
 
         long start = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Inserting into tree...");
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -364,7 +364,7 @@
             String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
             String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
             TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if (i % 1000 == 0) {
                     LOGGER.info("Inserting[" + i + "] " + f0 + " " + f1);
                 }
@@ -378,7 +378,7 @@
             }
         }
         long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
         }
 
@@ -410,7 +410,7 @@
      */
     @Test
     public void deleteExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Deletion Example");
         }
 
@@ -444,7 +444,7 @@
         // Max string length to be generated.
         int runs = 3;
         for (int run = 0; run < runs; run++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Deletion example run: " + (run + 1) + "/" + runs);
                 LOGGER.info("Inserting into tree...");
             }
@@ -460,7 +460,7 @@
                 TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
                 f0s[i] = f0;
                 f1s[i] = f1;
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     if (i % 1000 == 0) {
                         LOGGER.info("Inserting " + i);
                     }
@@ -476,13 +476,13 @@
                 insDoneCmp[i] = insDone;
             }
 
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Deleting from tree...");
             }
             int delDone = 0;
             for (int i = 0; i < ins; i++) {
                 TupleUtils.createTuple(tb, tuple, fieldSerdes, f0s[i], f1s[i]);
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     if (i % 1000 == 0) {
                         LOGGER.info("Deleting " + i);
                     }
@@ -496,7 +496,7 @@
                     }
                 }
                 if (insDoneCmp[i] != delDone) {
-                    if (LOGGER.isLoggable(Level.INFO)) {
+                    if (LOGGER.isInfoEnabled()) {
                         LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION EXAMPLE.");
                         LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
                     }
@@ -504,7 +504,7 @@
                 }
             }
             if (insDone != delDone) {
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
                 }
                 break;
@@ -523,7 +523,7 @@
      */
     @Test
     public void updateExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Update example");
         }
 
@@ -549,7 +549,7 @@
         treeIndex.create();
         treeIndex.activate();
 
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Inserting into tree...");
         }
         IndexAccessParameters actx =
@@ -565,7 +565,7 @@
             String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
             TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
             keys[i] = f0;
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if (i % 1000 == 0) {
                     LOGGER.info("Inserting " + i);
                 }
@@ -583,7 +583,7 @@
 
         int runs = 3;
         for (int run = 0; run < runs; run++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Update test run: " + (run + 1) + "/" + runs);
                 LOGGER.info("Updating BTree");
             }
@@ -591,7 +591,7 @@
                 // Generate a new random value for f1.
                 String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
                 TupleUtils.createTuple(tb, tuple, fieldSerdes, keys[i], f1);
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     if (i % 1000 == 0) {
                         LOGGER.info("Updating " + i);
                     }
@@ -612,7 +612,7 @@
      */
     @Test
     public void bulkLoadExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Bulk load example");
         }
         // Declare fields.
@@ -642,7 +642,7 @@
 
         // Load sorted records.
         int ins = 100000;
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Bulk loading " + ins + " tuples");
         }
         long start = System.currentTimeMillis();
@@ -655,7 +655,7 @@
         }
         bulkLoader.end();
         long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(ins + " tuples loaded in " + (end - start) + "ms");
         }
 
@@ -688,7 +688,7 @@
      */
     @Test
     public void bulkOrderVerificationExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Bulk load order verification example");
         }
         // Declare fields.
@@ -757,7 +757,7 @@
     }
 
     protected void orderedScan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Ordered Scan:");
         }
         IIndexCursor scanCursor = indexAccessor.createSearchCursor(false);
@@ -768,7 +768,7 @@
                 scanCursor.next();
                 ITupleReference frameTuple = scanCursor.getTuple();
                 String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info(rec);
                 }
             }
@@ -779,7 +779,7 @@
 
     protected void diskOrderScan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
         try {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Disk-Order Scan:");
             }
             ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) indexAccessor;
@@ -791,7 +791,7 @@
                     diskOrderCursor.next();
                     ITupleReference frameTuple = diskOrderCursor.getTuple();
                     String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
-                    if (LOGGER.isLoggable(Level.INFO)) {
+                    if (LOGGER.isInfoEnabled()) {
                         LOGGER.info(rec);
                     }
                 }
@@ -801,13 +801,13 @@
         } catch (UnsupportedOperationException e) {
             // Ignore exception because some indexes, e.g. the LSMBTree, don't
             // support disk-order scan.
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Ignoring disk-order scan since it's not supported.");
             }
         } catch (ClassCastException e) {
             // Ignore exception because IIndexAccessor sometimes isn't
             // an ITreeIndexAccessor, e.g., for the LSMBTree.
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Ignoring disk-order scan since it's not supported.");
             }
         }
@@ -816,7 +816,7 @@
     protected void rangeSearch(IBinaryComparatorFactory[] cmpFactories, IIndexAccessor indexAccessor,
             ISerializerDeserializer[] fieldSerdes, ITupleReference lowKey, ITupleReference highKey,
             ITupleReference minFilterTuple, ITupleReference maxFilterTuple) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             String lowKeyString = TupleUtils.printTuple(lowKey, fieldSerdes);
             String highKeyString = TupleUtils.printTuple(highKey, fieldSerdes);
             LOGGER.info("Range-Search in: [ " + lowKeyString + ", " + highKeyString + "]");
@@ -837,7 +837,7 @@
                 rangeCursor.next();
                 ITupleReference frameTuple = rangeCursor.getTuple();
                 String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info(rec);
                 }
             }
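
The test classes above carry the jul guards over verbatim: isInfoEnabled() around string-concatenated messages. For reference, Log4j2 also supports parameterized messages, which skip building the string when the level is disabled; the sketch below is illustrative and not part of this change.

// Illustrative only: Log4j2 parameterized messages defer formatting, so the
// isInfoEnabled() guard matters mainly when computing the arguments is itself costly.
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class ParameterizedLoggingSketch {
    private static final Logger LOGGER = LogManager.getLogger();

    void reportInsert(int i, int numTuples) {
        // Equivalent in effect to guarding LOGGER.info("Inserting Tuple " + i + "/" + numTuples)
        // with isInfoEnabled(); the {} placeholders are only filled when INFO is enabled.
        LOGGER.info("Inserting Tuple {}/{}", i, numTuples);
    }
}
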
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
index d73439d..90c64cb 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
@@ -20,8 +20,6 @@
 package org.apache.hyracks.storage.am.btree;
 
 import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -35,12 +33,14 @@
 import org.apache.hyracks.storage.am.common.TestWorkloadConf;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.common.IIndex;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.Test;
 
 @SuppressWarnings("rawtypes")
 public abstract class OrderedIndexMultiThreadTest {
 
-    protected final Logger LOGGER = Logger.getLogger(OrderedIndexMultiThreadTest.class.getName());
+    protected final Logger LOGGER = LogManager.getLogger();
 
     // Machine-specific number of threads to use for testing.
     protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
@@ -67,7 +67,7 @@
             String dataMsg) throws InterruptedException, HyracksDataException {
         setUp();
 
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             String indexTypeName = getIndexTypeName();
             LOGGER.info(indexTypeName + " MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
                     + "; Workload: " + conf.toString() + ".");
@@ -94,7 +94,7 @@
         index.validate();
         driver.deinit();
 
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("BTree MultiThread Test Time: " + times[0] + "ms");
         }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java
index 84a5df6..3dac0db 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java
@@ -20,9 +20,9 @@
 package org.apache.hyracks.storage.am.btree;
 
 import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.Test;
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -35,7 +35,7 @@
 
 @SuppressWarnings("rawtypes")
 public abstract class OrderedIndexTestDriver {
-    protected final Logger LOGGER = Logger.getLogger(OrderedIndexTestDriver.class.getName());
+    protected final Logger LOGGER = LogManager.getLogger();
 
     protected static final int numTuplesToInsert = AccessMethodTestsConfig.BTREE_NUM_TUPLES_TO_INSERT;
 
@@ -58,7 +58,7 @@
 
     @Test
     public void oneIntKeyAndValue() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("BTree " + getTestOpName() + " Test With One Int Key And Value.");
         }
 
@@ -75,7 +75,7 @@
 
     @Test
     public void twoIntKeys() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys.");
         }
 
@@ -97,7 +97,7 @@
 
     @Test
     public void twoIntKeysAndValues() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys And Values.");
         }
 
@@ -120,7 +120,7 @@
 
     @Test
     public void oneStringKeyAndValue() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("BTree " + getTestOpName() + " Test With One String Key And Value.");
         }
 
@@ -138,7 +138,7 @@
 
     @Test
     public void twoStringKeys() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys.");
         }
 
@@ -160,7 +160,7 @@
 
     @Test
     public void twoStringKeysAndValues() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys And Values.");
         }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
index 1c408fc..665178c 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
@@ -28,12 +28,9 @@
 import java.util.Random;
 import java.util.SortedSet;
 import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.tuple.MutablePair;
 import org.apache.commons.lang3.tuple.Pair;
-import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.ErrorCode;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -52,10 +49,12 @@
 import org.apache.hyracks.storage.common.IIndexCursor;
 import org.apache.hyracks.storage.common.ISearchPredicate;
 import org.apache.hyracks.storage.common.MultiComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 @SuppressWarnings("rawtypes")
 public class OrderedIndexTestUtils extends TreeIndexTestUtils {
-    private static final Logger LOGGER = Logger.getLogger(OrderedIndexTestUtils.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private static void compareActualAndExpected(ITupleReference actual, CheckTuple expected,
             ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
@@ -95,7 +94,7 @@
     @SuppressWarnings("unchecked")
     public void checkRangeSearch(IIndexTestContext ctx, ITupleReference lowKey, ITupleReference highKey,
             boolean lowKeyInclusive, boolean highKeyInclusive) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Testing Range Search.");
         }
         MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), lowKey);
@@ -143,7 +142,7 @@
     }
 
     public void checkPointSearches(IIndexTestContext ictx) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Testing Point Searches On All Expected Keys.");
         }
         OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
@@ -254,7 +253,7 @@
 
         int c = 1;
         for (CheckTuple checkTuple : checkTuples) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if (c % (numTuples / 10) == 0) {
                     LOGGER.info("Inserting Tuple " + c + "/" + numTuples);
                 }
@@ -278,7 +277,7 @@
         String[] fieldValues = new String[fieldCount];
         MutablePair<ITupleReference, ITupleReference> minMax = null;
         for (int i = 0; i < numTuples; i++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
                     LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
                 }
@@ -318,7 +317,7 @@
         int numKeyFields = ctx.getKeyFieldCount();
         String[] fieldValues = new String[fieldCount];
         for (int i = 0; i < numTuples; i++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
                     LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
                 }
@@ -387,7 +386,7 @@
             // Set values.
             setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
             TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
                     LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
                 }
@@ -417,7 +416,7 @@
             checkTuples[idx++] = checkTuple;
         }
         for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
                     LOGGER.info("Updating Tuple " + (i + 1) + "/" + numTuples);
                 }
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
index 0a7f4db..9304adf 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
@@ -28,8 +28,6 @@
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.tuple.MutablePair;
 import org.apache.commons.lang3.tuple.Pair;
@@ -44,10 +42,13 @@
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
 import org.apache.hyracks.storage.common.IIndexBulkLoader;
 import org.apache.hyracks.storage.common.ISearchPredicate;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 @SuppressWarnings("rawtypes")
 public abstract class TreeIndexTestUtils {
-    private static final Logger LOGGER = Logger.getLogger(TreeIndexTestUtils.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     protected abstract CheckTuple createCheckTuple(int numFields, int numKeyFields);
 
@@ -116,7 +117,7 @@
 
     @SuppressWarnings("unchecked")
     public void checkScan(IIndexTestContext ctx) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Testing Scan.");
         }
         ITreeIndexCursor scanCursor = (ITreeIndexCursor) ctx.getIndexAccessor().createSearchCursor(false);
@@ -128,7 +129,7 @@
 
     public void checkDiskOrderScan(IIndexTestContext ctx) throws Exception {
         try {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Testing Disk-Order Scan.");
             }
             ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) ctx.getIndexAccessor();
@@ -158,19 +159,19 @@
                 try {
                     diskOrderCursor.close();
                 } catch (Exception ex) {
-                    LOGGER.log(Level.WARNING, "Error during scan cursor close", ex);
+                    LOGGER.log(Level.WARN, "Error during scan cursor close", ex);
                 }
             }
         } catch (UnsupportedOperationException e) {
             // Ignore exception because some indexes, e.g. the LSMTrees, don't
             // support disk-order scan.
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Ignoring disk-order scan since it's not supported.");
             }
         } catch (ClassCastException e) {
             // Ignore exception because IIndexAccessor sometimes isn't
             // an ITreeIndexAccessor, e.g., for the LSMBTree.
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Ignoring disk-order scan since it's not supported.");
             }
         }
@@ -199,7 +200,7 @@
             // Set values.
             setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
             TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), filtered, fieldValues);
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
                     LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
                 }
@@ -256,7 +257,7 @@
             // Set values.
             setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
             TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
                     LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
                 }
@@ -319,7 +320,7 @@
         IIndexBulkLoader bulkLoader = ctx.getIndex().createBulkLoader(0.7f, false, numTuples, false);
         int c = 1;
         for (CheckTuple checkTuple : checkTuples) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 //if (c % (numTuples / 10) == 0) {
                 LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples);
                 //}
@@ -347,7 +348,7 @@
         }
 
         for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
                     LOGGER.info("Deleting Tuple " + (i + 1) + "/" + numTuples);
                 }
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
index d6358e3..4a31cd6 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
@@ -20,8 +20,6 @@
 package org.apache.hyracks.storage.am.rtree;
 
 import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -53,6 +51,8 @@
 import org.apache.hyracks.storage.common.IIndexAccessor;
 import org.apache.hyracks.storage.common.IIndexBulkLoader;
 import org.apache.hyracks.storage.common.MultiComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.Test;
 
 @SuppressWarnings("rawtypes")
@@ -64,7 +64,7 @@
         RTREE
     };
 
-    protected static final Logger LOGGER = Logger.getLogger(AbstractRTreeExamplesTest.class.getName());
+    protected static final Logger LOGGER = LogManager.getLogger();
     protected final Random rnd = new Random(50);
     protected RTreeType rTreeType;
 
@@ -82,7 +82,7 @@
      */
     @Test
     public void twoDimensionsExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Fixed-Length Key,Value Example.");
         }
 
@@ -145,7 +145,7 @@
         treeIndex.activate();
 
         long start = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Inserting into tree...");
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -172,7 +172,7 @@
             }
         }
         long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
         }
 
@@ -198,7 +198,7 @@
      */
     @Test
     public void rTreePageSplitTestExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("RTree page split test.");
         }
 
@@ -339,7 +339,7 @@
      */
     @Test
     public void rStarTreePageSplitTestExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("R*Tree page split test.");
         }
 
@@ -482,7 +482,7 @@
      */
     @Test
     public void threeDimensionsExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Fixed-Length Key,Value Example.");
         }
 
@@ -549,7 +549,7 @@
         treeIndex.activate();
 
         long start = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Inserting into tree...");
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -577,7 +577,7 @@
             }
         }
         long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
         }
 
@@ -603,7 +603,7 @@
      */
     @Test
     public void deleteExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Deletion Example");
         }
 
@@ -663,7 +663,7 @@
 
         int runs = 3;
         for (int run = 0; run < runs; run++) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Deletion example run: " + (run + 1) + "/" + runs);
                 LOGGER.info("Inserting into tree...");
             }
@@ -702,7 +702,7 @@
                 insDoneCmp[i] = insDone;
             }
 
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Deleting from tree...");
             }
             int delDone = 0;
@@ -717,7 +717,7 @@
                     }
                 }
                 if (insDoneCmp[i] != delDone) {
-                    if (LOGGER.isLoggable(Level.INFO)) {
+                    if (LOGGER.isInfoEnabled()) {
                         LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION EXAMPLE.");
                         LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
                     }
@@ -725,7 +725,7 @@
                 }
             }
             if (insDone != delDone) {
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
                 }
                 break;
@@ -740,7 +740,7 @@
      */
     @Test
     public void bulkLoadExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Bulk load example");
         }
         // Declare fields.
@@ -801,7 +801,7 @@
 
         // Load records.
         int numInserts = 10000;
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Bulk loading " + numInserts + " tuples");
         }
         long start = System.currentTimeMillis();
@@ -824,7 +824,7 @@
 
         bulkLoader.end();
         long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(numInserts + " tuples loaded in " + (end - start) + "ms");
         }
 
@@ -842,7 +842,7 @@
     }
 
     protected void scan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Scan:");
         }
         ITreeIndexCursor scanCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
@@ -853,7 +853,7 @@
                 scanCursor.next();
                 ITupleReference frameTuple = scanCursor.getTuple();
                 String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info(rec);
                 }
             }
@@ -864,7 +864,7 @@
 
     protected void diskOrderScan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
         try {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Disk-Order Scan:");
             }
             ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) indexAccessor;
@@ -876,7 +876,7 @@
                     diskOrderCursor.next();
                     ITupleReference frameTuple = diskOrderCursor.getTuple();
                     String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
-                    if (LOGGER.isLoggable(Level.INFO)) {
+                    if (LOGGER.isInfoEnabled()) {
                         LOGGER.info(rec);
                     }
                 }
@@ -886,13 +886,13 @@
         } catch (UnsupportedOperationException e) {
             // Ignore exception because some indexes, e.g. the LSMRTree, don't
             // support disk-order scan.
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Ignoring disk-order scan since it's not supported.");
             }
         } catch (ClassCastException e) {
             // Ignore exception because IIndexAccessor sometimes isn't
             // an ITreeIndexAccessor, e.g., for the LSMRTree.
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Ignoring disk-order scan since it's not supported.");
             }
         }
@@ -901,7 +901,7 @@
     protected void rangeSearch(IBinaryComparatorFactory[] cmpFactories, IIndexAccessor indexAccessor,
             ISerializerDeserializer[] fieldSerdes, ITupleReference key, ITupleReference minFilterTuple,
             ITupleReference maxFilterTuple) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             String kString = TupleUtils.printTuple(key, fieldSerdes);
             LOGGER.info("Range-Search using key: " + kString);
         }
@@ -921,7 +921,7 @@
                 rangeCursor.next();
                 ITupleReference frameTuple = rangeCursor.getTuple();
                 String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info(rec);
                 }
             }
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
index 53245ac..cf0e1e4 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
@@ -20,8 +20,6 @@
 package org.apache.hyracks.storage.am.rtree;
 
 import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -41,6 +39,8 @@
 import org.apache.hyracks.storage.am.rtree.AbstractRTreeExamplesTest.RTreeType;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
 import org.apache.hyracks.storage.am.rtree.util.RTreeUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.Test;
 
 @SuppressWarnings("rawtypes")
@@ -54,7 +54,7 @@
         this.rTreeType = rTreeType;
     }
 
-    protected final Logger LOGGER = Logger.getLogger(AbstractRTreeMultiThreadTest.class.getName());
+    protected final Logger LOGGER = LogManager.getLogger();
 
     // Machine-specific number of threads to use for testing.
     protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
@@ -84,7 +84,7 @@
             int numThreads, TestWorkloadConf conf, String dataMsg) throws HyracksDataException, InterruptedException {
         setUp();
 
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             String indexTypeName = getIndexTypeName();
             LOGGER.info(indexTypeName + " MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
                     + "; Workload: " + conf.toString() + ".");
@@ -120,7 +120,7 @@
         long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
         driver.deinit();
 
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("RTree MultiThread Test Time: " + times[0] + "ms");
         }
 
@@ -193,7 +193,7 @@
     @Test
     public void rstartreeTwoDimensionsInt() throws InterruptedException, HyracksDataException {
         if (!testRstarPolicy) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Ignoring RTree Multithread Test With Two Dimensions With Integer Keys.");
             }
             return;
@@ -220,7 +220,7 @@
     @Test
     public void rstartreeTwoDimensionsDouble() throws Exception {
         if (!testRstarPolicy) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Ignoring RTree Multithread Test With Two Dimensions With Double Keys.");
             }
             return;
@@ -248,7 +248,7 @@
     @Test
     public void rstartreeFourDimensionsDouble() throws InterruptedException, HyracksDataException {
         if (!testRstarPolicy) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Ignoring RTree Multithread Test With Four Dimensions With Double Keys.");
             }
             return;
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
index 17e4c09..1f71889 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
@@ -20,10 +20,6 @@
 package org.apache.hyracks.storage.am.rtree;
 
 import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.junit.Test;
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.data.std.primitive.DoublePointable;
@@ -36,6 +32,9 @@
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
 import org.apache.hyracks.storage.am.rtree.util.RTreeUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.Test;
 
 @SuppressWarnings("rawtypes")
 public abstract class AbstractRTreeTestDriver {
@@ -45,7 +44,7 @@
         this.testRstarPolicy = testRstarPolicy;
     }
 
-    protected final Logger LOGGER = Logger.getLogger(AbstractRTreeTestDriver.class.getName());
+    protected final Logger LOGGER = LogManager.getLogger();
 
     protected static final int numTuplesToInsert = AccessMethodTestsConfig.RTREE_NUM_TUPLES_TO_INSERT;
 
@@ -63,7 +62,7 @@
 
     @Test
     public void rtreeTwoDimensionsInt() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Integer Keys.");
         }
 
@@ -84,7 +83,7 @@
 
     @Test
     public void rtreeTwoDimensionsDouble() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Double Keys.");
         }
 
@@ -105,7 +104,7 @@
 
     @Test
     public void rtreeFourDimensionsDouble() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("RTree " + getTestOpName() + " Test With Four Dimensions With Double Keys.");
         }
 
@@ -130,12 +129,12 @@
     @Test
     public void rstartreeTwoDimensionsInt() throws Exception {
         if (!testRstarPolicy) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Ignoring RTree " + getTestOpName() + " Test With Two Dimensions With Integer Keys.");
             }
             return;
         }
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Integer Keys.");
         }
 
@@ -157,12 +156,12 @@
     @Test
     public void rstartreeTwoDimensionsDouble() throws Exception {
         if (!testRstarPolicy) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Ignoring RTree " + getTestOpName() + " Test With Two Dimensions With Double Keys.");
             }
             return;
         }
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Double Keys.");
         }
 
@@ -184,12 +183,12 @@
     @Test
     public void rstartreeFourDimensionsDouble() throws Exception {
         if (!testRstarPolicy) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Ignoring RTree " + getTestOpName() + " Test With Four Dimensions With Double Keys.");
             }
             return;
         }
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("RTree " + getTestOpName() + " Test With Four Dimensions With Double Keys.");
         }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
index 5a890a4..eb4ea56 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
@@ -24,8 +24,6 @@
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.ErrorCode;
@@ -42,10 +40,12 @@
 import org.apache.hyracks.storage.am.rtree.util.RTreeUtils;
 import org.apache.hyracks.storage.common.ISearchPredicate;
 import org.apache.hyracks.storage.common.MultiComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 @SuppressWarnings("rawtypes")
 public class RTreeTestUtils extends TreeIndexTestUtils {
-    private static final Logger LOGGER = Logger.getLogger(RTreeTestUtils.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
     private int intPayloadValue = 0;
     private double doublePayloadValue = 0.0;
 
@@ -65,7 +65,7 @@
     }
 
     public void checkRangeSearch(IIndexTestContext ictx, ITupleReference key) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Testing Range Search.");
         }
         AbstractRTreeTestContext ctx = (AbstractRTreeTestContext) ictx;
@@ -101,7 +101,7 @@
             // Set values.
             setDoublePayloadFields(fieldValues, numKeyFields, fieldCount);
             TupleUtils.createDoubleTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
                     LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
                 }
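
For reference, the pattern these hunks repeat, restated as a standalone sketch (the class and method names here are illustrative only and not part of the change):

  import org.apache.logging.log4j.LogManager;
  import org.apache.logging.log4j.Logger;

  public class LoggingMigrationExample {
      // LogManager.getLogger() without an argument names the logger after the
      // class in which the call appears, replacing
      // Logger.getLogger(LoggingMigrationExample.class.getName()).
      private static final Logger LOGGER = LogManager.getLogger();

      void insertProgress(int i, int numTuples) {
          // java.util.logging's isLoggable(Level.INFO) guard becomes the
          // level-specific isInfoEnabled() guard.
          if (LOGGER.isInfoEnabled()) {
              LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
          }
      }
  }

Log4j2 also accepts parameterized messages such as LOGGER.info("Inserting Tuple {}/{}", i + 1, numTuples), which defer string construction without an explicit guard; the hunks here keep the existing guard style unchanged.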
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
index 43b0957..1ff64d4 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
@@ -75,5 +75,9 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/BloomFilterTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/BloomFilterTest.java
index 24b1122..c6652c3 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/BloomFilterTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/BloomFilterTest.java
@@ -53,7 +53,7 @@
 
     @Test
     public void singleFieldTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("TESTING BLOOM FILTER");
         }
 
@@ -111,7 +111,7 @@
 
     @Test
     public void multiFieldTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("TESTING BLOOM FILTER");
         }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java
index 9d1b9be..b7b4639e 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java
@@ -21,7 +21,6 @@
 
 import java.nio.ByteBuffer;
 import java.util.Random;
-import java.util.logging.Level;
 
 import org.junit.Assert;
 import org.junit.Before;
@@ -50,7 +49,7 @@
 
     @Test
     public void murmurhashONEIntegerFieldTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("TESTING MURMUR HASH ONE INTEGER FIELD");
         }
 
@@ -77,7 +76,7 @@
 
     @Test
     public void murmurhashTwoIntegerFieldsTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("TESTING MURMUR HASH TWO INTEGER FIELDS");
         }
 
@@ -104,7 +103,7 @@
 
     @Test
     public void murmurhashOneStringFieldTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("TESTING MURMUR HASH ONE STRING FIELD");
         }
 
@@ -132,7 +131,7 @@
 
     @Test
     public void murmurhashThreeStringFieldsTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("TESTING MURMUR HASH THREE STRING FIELDS");
         }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java
index fdcd96b..b0644e3 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java
@@ -20,15 +20,16 @@
 package org.apache.hyracks.storage.am.bloomfilter.util;
 
 import java.util.Random;
-import java.util.logging.Logger;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.After;
 import org.junit.Before;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public abstract class AbstractBloomFilterTest {
-    protected final Logger LOGGER = Logger.getLogger(BloomFilterTestHarness.class.getName());
+    protected final Logger LOGGER = LogManager.getLogger();
 
     protected final BloomFilterTestHarness harness;
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
index 0ddceac..63b1a38 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
@@ -85,5 +85,9 @@
       <artifactId>hyracks-data-std</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java
index 0a714cf..2943ee9 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java
@@ -26,7 +26,6 @@
 import java.util.Collections;
 import java.util.Random;
 import java.util.TreeSet;
-import java.util.logging.Level;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -81,7 +80,7 @@
 
     @Test
     public void uniqueIndexTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
         }
 
@@ -157,7 +156,7 @@
 
     @Test
     public void nonUniqueIndexTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
         }
 
@@ -231,7 +230,7 @@
 
     @Test
     public void nonUniqueFieldPrefixIndexTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
         }
 
@@ -400,14 +399,14 @@
                             u = ')';
                         }
 
-                        if (LOGGER.isLoggable(Level.INFO)) {
+                        if (LOGGER.isInfoEnabled()) {
                             LOGGER.info("RANGE: " + l + " " + lowKey + " , " + highKey + " " + u);
                         }
                         StringBuilder strBuilder = new StringBuilder();
                         for (Integer r : expectedResults) {
                             strBuilder.append(r + " ");
                         }
-                        if (LOGGER.isLoggable(Level.INFO)) {
+                        if (LOGGER.isInfoEnabled()) {
                             LOGGER.info(strBuilder.toString());
                         }
                     }
@@ -416,7 +415,7 @@
                 if (results.size() == expectedResults.size()) {
                     for (int k = 0; k < results.size(); k++) {
                         if (!results.get(k).equals(expectedResults.get(k))) {
-                            if (LOGGER.isLoggable(Level.INFO)) {
+                            if (LOGGER.isInfoEnabled()) {
                                 LOGGER.info("DIFFERENT RESULTS AT: i=" + i + " j=" + j + " k=" + k);
                                 LOGGER.info(results.get(k) + " " + expectedResults.get(k));
                             }
@@ -424,7 +423,7 @@
                         }
                     }
                 } else {
-                    if (LOGGER.isLoggable(Level.INFO)) {
+                    if (LOGGER.isInfoEnabled()) {
                         LOGGER.info("UNEQUAL NUMBER OF RESULTS AT: i=" + i + " j=" + j);
                         LOGGER.info("RESULTS: " + results.size());
                         LOGGER.info("EXPECTED RESULTS: " + expectedResults.size());
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java
index f2fab5a..2243ee3 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java
@@ -20,7 +20,6 @@
 
 import java.io.DataOutput;
 import java.util.Random;
-import java.util.logging.Level;
 
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.IFrameTupleAccessor;
@@ -110,7 +109,7 @@
 
         long start = System.currentTimeMillis();
 
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("INSERTING INTO TREE");
         }
 
@@ -146,7 +145,7 @@
 
             tuple.reset(accessor, 0);
 
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if (i % 10000 == 0) {
                     long end = System.currentTimeMillis();
                     LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
@@ -166,7 +165,7 @@
         TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager,
                 harness.getFileReference(), btree.getRootPageId());
         TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("\n" + stats.toString());
         }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
index 062abae..bc297fa 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
@@ -19,7 +19,6 @@
 package org.apache.hyracks.storage.am.btree;
 
 import java.util.Random;
-import java.util.logging.Level;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -93,7 +92,7 @@
 
         long start = System.currentTimeMillis();
 
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("INSERTING INTO TREE");
         }
 
@@ -108,7 +107,7 @@
             int f0 = rnd.nextInt() % 10000;
             int f1 = 5;
             TupleUtils.createIntegerTuple(tb, insertTuple, f0, f1);
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if (i % 10000 == 0) {
                     long end = System.currentTimeMillis();
                     LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
@@ -126,12 +125,12 @@
         }
         long end = System.currentTimeMillis();
         long duration = end - start;
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("DURATION: " + duration);
         }
 
         // Update scan.
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("UPDATE SCAN:");
         }
         // Set the cursor to X latch nodes.
@@ -152,7 +151,7 @@
         }
 
         // Ordered scan to verify the values.
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("ORDERED SCAN:");
         }
         // Set the cursor to X latch nodes.
@@ -163,7 +162,7 @@
                 scanCursor.next();
                 ITupleReference tuple = scanCursor.getTuple();
                 String rec = TupleUtils.printTuple(tuple, recDescSers);
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info(rec);
                 }
             }
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java
index bd5d5b8..48e8d51 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java
@@ -21,7 +21,6 @@
 
 import java.io.DataOutput;
 import java.util.Random;
-import java.util.logging.Level;
 
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.IFrameTupleAccessor;
@@ -65,7 +64,7 @@
     private ITupleReference createTuple(IHyracksTaskContext ctx, int f0, int f1, int f2, boolean print)
             throws HyracksDataException {
         if (print) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);
             }
         }
@@ -152,7 +151,7 @@
             // insert records with random calls to compact and compress
             for (int i = 0; i < numRecords; i++) {
 
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     if ((i + 1) % 100 == 0) {
                         LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
                     }
@@ -192,7 +191,7 @@
 
             // delete records with random calls to compact and compress
             for (int i = 0; i < numRecords; i++) {
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     if ((i + 1) % 100 == 0) {
                         LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
                     }
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/StorageFileAccessTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/StorageFileAccessTest.java
index 6dcb3a4..576c6bb 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/StorageFileAccessTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/StorageFileAccessTest.java
@@ -22,7 +22,6 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Random;
-import java.util.logging.Level;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest;
@@ -83,7 +82,7 @@
         private void pinRandomPage() {
             int pageId = Math.abs(rnd.nextInt() % maxPages);
 
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info(workerId + " PINNING PAGE: " + pageId);
             }
 
@@ -99,7 +98,7 @@
                         break;
 
                     case FTA_READONLY: {
-                        if (LOGGER.isLoggable(Level.INFO)) {
+                        if (LOGGER.isInfoEnabled()) {
                             LOGGER.info(workerId + " S LATCHING: " + pageId);
                         }
                         page.acquireReadLatch();
@@ -108,7 +107,7 @@
                         break;
 
                     case FTA_WRITEONLY: {
-                        if (LOGGER.isLoggable(Level.INFO)) {
+                        if (LOGGER.isInfoEnabled()) {
                             LOGGER.info(workerId + " X LATCHING: " + pageId);
                         }
                         page.acquireWriteLatch();
@@ -118,13 +117,13 @@
 
                     case FTA_MIXED: {
                         if (rnd.nextInt() % 2 == 0) {
-                            if (LOGGER.isLoggable(Level.INFO)) {
+                            if (LOGGER.isInfoEnabled()) {
                                 LOGGER.info(workerId + " S LATCHING: " + pageId);
                             }
                             page.acquireReadLatch();
                             latch = LatchType.LATCH_S;
                         } else {
-                            if (LOGGER.isLoggable(Level.INFO)) {
+                            if (LOGGER.isInfoEnabled()) {
                                 LOGGER.info(workerId + " X LATCHING: " + pageId);
                             }
                             page.acquireWriteLatch();
@@ -149,18 +148,18 @@
 
                 if (plPage.latch != null) {
                     if (plPage.latch == LatchType.LATCH_S) {
-                        if (LOGGER.isLoggable(Level.INFO)) {
+                        if (LOGGER.isInfoEnabled()) {
                             LOGGER.info(workerId + " S UNLATCHING: " + plPage.pageId);
                         }
                         plPage.page.releaseReadLatch();
                     } else {
-                        if (LOGGER.isLoggable(Level.INFO)) {
+                        if (LOGGER.isInfoEnabled()) {
                             LOGGER.info(workerId + " X UNLATCHING: " + plPage.pageId);
                         }
                         plPage.page.releaseWriteLatch(true);
                     }
                 }
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info(workerId + " UNPINNING PAGE: " + plPage.pageId);
                 }
 
@@ -172,7 +171,7 @@
         }
 
         private void openFile() {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info(workerId + " OPENING FILE: " + fileId);
             }
             try {
@@ -184,7 +183,7 @@
         }
 
         private void closeFile() {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info(workerId + " CLOSING FILE: " + fileId);
             }
             try {
@@ -203,7 +202,7 @@
             while (loopCount < maxLoopCount) {
                 loopCount++;
 
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     LOGGER.info(workerId + " LOOP: " + loopCount + "/" + maxLoopCount);
                 }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/util/AbstractBTreeTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/util/AbstractBTreeTest.java
index 20c7ff6..64c6038 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/util/AbstractBTreeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/util/AbstractBTreeTest.java
@@ -19,15 +19,15 @@
 
 package org.apache.hyracks.storage.am.btree.util;
 
-import java.util.logging.Logger;
-
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.After;
 import org.junit.Before;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public abstract class AbstractBTreeTest {
-    protected final Logger LOGGER = Logger.getLogger(BTreeTestHarness.class.getName());
+    protected final Logger LOGGER = LogManager.getLogger();
 
     protected final BTreeTestHarness harness;
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
index 597ce59..af7e9e1 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
@@ -133,5 +133,9 @@
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-lang3</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
index ecf1f85..0c7eed8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
@@ -73,7 +73,7 @@
      */
     @Test
     public void additionalFilteringingExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Testing LSMBTree component filters.");
         }
 
@@ -106,7 +106,7 @@
         treeIndex.activate();
 
         long start = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Inserting into tree...");
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -119,7 +119,7 @@
             int f0 = rnd.nextInt() % numInserts;
             int f1 = i;
             TupleUtils.createIntegerTuple(tb, tuple, f0, f1);
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 if (i % 1000 == 0) {
                     LOGGER.info("Inserting " + i + " : " + f0 + " " + f1);
                 }
@@ -127,7 +127,7 @@
             indexAccessor.insert(tuple);
         }
         long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
         }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeUpdateInPlaceScanDiskComponentsTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeUpdateInPlaceScanDiskComponentsTest.java
index 8a1444e..790cddd 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeUpdateInPlaceScanDiskComponentsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeUpdateInPlaceScanDiskComponentsTest.java
@@ -205,7 +205,7 @@
             // keys the cube root of numTuples, etc.
             int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
             for (int i = 0; i < numTuples; i++) {
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
                         LOGGER.info("Generating Tuple " + (i + 1) + "/" + numTuples);
                     }
@@ -280,7 +280,7 @@
             }
 
             for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
-                if (LOGGER.isLoggable(Level.INFO)) {
+                if (LOGGER.isInfoEnabled()) {
                     if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
                         LOGGER.info("Deleting Tuple " + (i + 1) + "/" + numTuples);
                     }
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
index 80677db..803c5cb 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
@@ -26,7 +26,6 @@
 import java.util.Date;
 import java.util.List;
 import java.util.Random;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -51,9 +50,11 @@
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
 import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
 import org.apache.hyracks.test.support.TestUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class LSMBTreeTestHarness {
-    protected static final Logger LOGGER = Logger.getLogger(LSMBTreeTestHarness.class.getName());
+    protected static final Logger LOGGER = LogManager.getLogger();
 
     public static final BTreeLeafFrameType[] LEAF_FRAMES_TO_TEST =
             new BTreeLeafFrameType[] { BTreeLeafFrameType.REGULAR_NSM };
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
index e6583fe..6d8929f 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
@@ -107,6 +107,10 @@
       <artifactId>hyracks-storage-am-btree</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
index d9ca115..1a0fd87 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
@@ -22,8 +22,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
 import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
@@ -35,11 +33,13 @@
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
 import org.apache.hyracks.storage.common.IIndex;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.Test;
 
 public abstract class AbstractInvertedIndexSearchTest extends AbstractInvertedIndexTest {
 
-    protected final Logger LOGGER = Logger.getLogger(AbstractInvertedIndexSearchTest.class.getName());
+    protected final Logger LOGGER = LogManager.getLogger();
 
     protected int NUM_DOC_QUERIES = AccessMethodTestsConfig.LSM_INVINDEX_NUM_DOC_QUERIES;
     protected int NUM_RANDOM_QUERIES = AccessMethodTestsConfig.LSM_INVINDEX_NUM_RANDOM_QUERIES;
@@ -70,7 +70,7 @@
         invIndex.validate();
 
         for (IInvertedIndexSearchModifier searchModifier : searchModifiers) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Running searches with: " + searchModifier.toString());
             }
             LSMInvertedIndexTestUtils.testIndexSearch(testCtx, tupleGen, harness.getRandom(), NUM_DOC_QUERIES,
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
index 5197812..757c9d8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
@@ -20,8 +20,6 @@
 package org.apache.hyracks.storage.am.lsm.invertedindex.common;
 
 import java.io.IOException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
@@ -33,11 +31,13 @@
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
 import org.apache.hyracks.storage.common.IIndex;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.After;
 import org.junit.Before;
 
 public abstract class AbstractInvertedIndexTest {
-    protected final Logger LOGGER = Logger.getLogger(AbstractInvertedIndexTest.class.getName());
+    protected final Logger LOGGER = LogManager.getLogger();
 
     protected final LSMInvertedIndexTestHarness harness = new LSMInvertedIndexTestHarness();
 
@@ -74,7 +74,7 @@
      */
     protected void validateAndCheckIndex(LSMInvertedIndexTestContext testCtx) throws HyracksDataException {
         IIndex invIndex = testCtx.getIndex();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Validating index: " + invIndex);
         }
         // Validate index and compare against expected index.
@@ -92,7 +92,7 @@
     protected void runTinySearchWorkload(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen)
             throws IOException {
         for (IInvertedIndexSearchModifier searchModifier : TEST_SEARCH_MODIFIERS) {
-            if (LOGGER.isLoggable(Level.INFO)) {
+            if (LOGGER.isInfoEnabled()) {
                 LOGGER.info("Running test workload with: " + searchModifier.toString());
             }
             LSMInvertedIndexTestUtils.testIndexSearch(testCtx, tupleGen, harness.getRandom(),
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
index e0427e8..3b2641d 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
@@ -21,8 +21,6 @@
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
@@ -34,11 +32,13 @@
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
 import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.Test;
 
 public class LSMInvertedIndexMultiThreadTest {
 
-    protected final Logger LOGGER = Logger.getLogger(LSMInvertedIndexMultiThreadTest.class.getName());
+    protected final Logger LOGGER = LogManager.getLogger();
 
     // Machine-specific number of threads to use for testing.
     protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
@@ -60,7 +60,7 @@
 
     protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numThreads,
             TestWorkloadConf conf, String dataMsg) throws InterruptedException, HyracksDataException {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("LSMInvertedIndex MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
                     + "; Workload: " + conf.toString() + ".");
         }
@@ -75,7 +75,7 @@
         testCtx.getIndex().validate();
         driver.deinit();
 
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("LSMInvertedIndex MultiThread Test Time: " + times[0] + "ms");
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
index 9bfe6e9..a7bf5a7 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
@@ -98,5 +98,9 @@
       <artifactId>hyracks-data-std</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java
index aefa385..00b3d28 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java
@@ -48,7 +48,7 @@
      */
     @Test
     public void additionalFilteringingExample() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Testing LSMRTree or LSMRTreeWithAntiMatterTuples component filters.");
         }
 
@@ -116,7 +116,7 @@
         treeIndex.activate();
 
         long start = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("Inserting into tree...");
         }
         ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -143,7 +143,7 @@
             }
         }
         long end = System.currentTimeMillis();
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
         }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
index fd910e1..8a5d0c5 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
@@ -26,7 +26,6 @@
 import java.util.Date;
 import java.util.List;
 import java.util.Random;
-import java.util.logging.Logger;
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -52,7 +51,6 @@
 import org.apache.hyracks.test.support.TestUtils;
 
 public class LSMRTreeTestHarness {
-    protected static final Logger LOGGER = Logger.getLogger(LSMRTreeTestHarness.class.getName());
 
     private static final long RANDOM_SEED = 50;
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
index c984adb..1b34fde 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
@@ -86,5 +86,9 @@
       <artifactId>hyracks-data-std</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
index d2cc96b..15f69bc 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
@@ -21,7 +21,6 @@
 
 import java.util.ArrayList;
 import java.util.Random;
-import java.util.logging.Level;
 
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -78,7 +77,7 @@
     @SuppressWarnings({ "unchecked", "rawtypes" })
     @Test
     public void rangeSearchTest() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
+        if (LOGGER.isInfoEnabled()) {
             LOGGER.info("TESTING RANGE SEARCH CURSOR FOR RTREE");
         }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
index 1e49e8a..ed94bf3 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
@@ -19,14 +19,14 @@
 
 package org.apache.hyracks.storage.am.rtree.utils;
 
-import java.util.logging.Logger;
-
 import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.junit.After;
 import org.junit.Before;
 
 public abstract class AbstractRTreeTest {
-    protected final Logger LOGGER = Logger.getLogger(RTreeTestHarness.class.getName());
+    protected final Logger LOGGER = LogManager.getLogger();
     protected final RTreeTestHarness harness;
 
     public AbstractRTreeTest() {
diff --git a/hyracks-fullstack/hyracks/hyracks-util/pom.xml b/hyracks-fullstack/hyracks/hyracks-util/pom.xml
index 419166b..52ccf2f 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-util/pom.xml
@@ -71,6 +71,10 @@
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-collections4</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
   </dependencies>
 
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/DiskUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/DiskUtil.java
index 9a65d72..a1f9346 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/DiskUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/DiskUtil.java
@@ -24,14 +24,15 @@
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import org.apache.commons.lang3.SystemUtils;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class DiskUtil {
 
-    private static final Logger LOGGER = Logger.getLogger(DiskUtil.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private DiskUtil() {
         throw new AssertionError("Util class should not be initialized.");
@@ -119,7 +120,7 @@
                     LOGGER.info(line);
                 }
             } catch (IOException e) {
-                LOGGER.log(Level.WARNING, e.getMessage(), e);
+                LOGGER.log(Level.WARN, e.getMessage(), e);
             }
         }).start();
     }
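
The level mapping applied in the DiskUtil hunk above and the JSONUtil hunk below, restated as a standalone sketch (the class name is illustrative only): java.util.logging WARNING maps to Log4j2 WARN, and FINEST to trace (FINE is commonly mapped to debug).

  import org.apache.logging.log4j.Level;
  import org.apache.logging.log4j.LogManager;
  import org.apache.logging.log4j.Logger;

  public class LevelMappingExample {
      private static final Logger LOGGER = LogManager.getLogger();

      void report(Exception e) {
          // Level.WARNING -> Level.WARN when keeping the generic log(...) form.
          LOGGER.log(Level.WARN, e.getMessage(), e);
          // finest(...) -> trace(...).
          LOGGER.trace(String.valueOf(e));
      }
  }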
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/ExitUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/ExitUtil.java
index b039227..c54c9dc 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/ExitUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/ExitUtil.java
@@ -18,12 +18,13 @@
  */
 package org.apache.hyracks.util;
 
-import java.util.logging.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 @SuppressWarnings("squid:S1147")
 public class ExitUtil {
 
-    private static final Logger LOGGER = Logger.getLogger(ExitUtil.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private static final ExitThread exitThread = new ExitThread();
 
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/JSONUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/JSONUtil.java
index dcdf140..158ab66 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/JSONUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/JSONUtil.java
@@ -23,7 +23,9 @@
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
@@ -34,7 +36,7 @@
 
 public class JSONUtil {
 
-    private static final Logger LOGGER = Logger.getLogger(JSONUtil.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private static final String INDENT = "\t";
 
@@ -63,8 +65,8 @@
         try {
             return appendObj(new StringBuilder(), om.readTree(str), initialIndent).toString();
         } catch (IOException e) {
-            LOGGER.finest(String.valueOf(e));
-            LOGGER.finest("Could not indent JSON string, returning the input string: " + str);
+            LOGGER.trace(String.valueOf(e));
+            LOGGER.trace("Could not indent JSON string, returning the input string: " + str);
             return str;
         }
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/PidHelper.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/PidHelper.java
index 410097e..5a8edbd 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/PidHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/PidHelper.java
@@ -22,12 +22,14 @@
 import java.lang.management.RuntimeMXBean;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class PidHelper {
 
-    private static final Logger LOGGER = Logger.getLogger(PidHelper.class.getName());
+    private static final Logger LOGGER = LogManager.getLogger();
 
     private PidHelper() {
     }