[ASTERIXDB-1564][CONF] Consistently Use Log4j2
- user model changes: no
- storage format changes: no
- interface changes: no
Details:
- Replace java.util.logging with Log4j2.
- Classes excluded due to their tests:
- IoUtil
- Tracer
Change-Id: Ic137571292f45de1f1994c61d328b97185012197
Reviewed-on: https://asterix-gerrit.ics.uci.edu/2226
Sonar-Qube: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Contrib: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Michael Blow <mblow@apache.org>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
diff --git a/asterixdb/asterix-active/pom.xml b/asterixdb/asterix-active/pom.xml
index 6b0c381..af10ed1 100644
--- a/asterixdb/asterix-active/pom.xml
+++ b/asterixdb/asterix-active/pom.xml
@@ -51,5 +51,9 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveManager.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveManager.java
index 264e9bc..bfa648a 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveManager.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveManager.java
@@ -29,8 +29,6 @@
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.active.message.ActiveManagerMessage;
import org.apache.asterix.active.message.ActiveStatsResponse;
@@ -42,10 +40,13 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.util.JavaSerializationUtils;
import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ActiveManager {
- private static final Logger LOGGER = Logger.getLogger(ActiveManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int SHUTDOWN_TIMEOUT_SECS = 60;
private final ExecutorService executor;
@@ -102,7 +103,7 @@
requestStats((ActiveStatsRequestMessage) message);
break;
default:
- LOGGER.warning("Unknown message type received: " + message.getKind());
+ LOGGER.warn("Unknown message type received: " + message.getKind());
}
}
@@ -112,7 +113,7 @@
IActiveRuntime runtime = runtimes.get(runtimeId);
long reqId = message.getReqId();
if (runtime == null) {
- LOGGER.warning("Request stats of a runtime that is not registered " + runtimeId);
+ LOGGER.warn("Request stats of a runtime that is not registered " + runtimeId);
// Send a failure message
((NodeControllerService) serviceCtx.getControllerService())
.sendApplicationMessageToCC(
@@ -132,7 +133,7 @@
}
public void shutdown() {
- LOGGER.warning("Shutting down ActiveManager on node " + nodeId);
+ LOGGER.warn("Shutting down ActiveManager on node " + nodeId);
Map<ActiveRuntimeId, Future<Void>> stopFutures = new HashMap<>();
shutdown = true;
runtimes.forEach((runtimeId, runtime) -> stopFutures.put(runtimeId, executor.submit(() -> {
@@ -144,22 +145,22 @@
try {
entry.getValue().get(SHUTDOWN_TIMEOUT_SECS, TimeUnit.SECONDS);
} catch (InterruptedException e) {
- LOGGER.warning("Interrupted waiting to stop runtime: " + entry.getKey());
+ LOGGER.warn("Interrupted waiting to stop runtime: " + entry.getKey());
Thread.currentThread().interrupt();
} catch (ExecutionException e) {
- LOGGER.log(Level.WARNING, "Exception while stopping runtime: " + entry.getKey(), e);
+ LOGGER.log(Level.WARN, "Exception while stopping runtime: " + entry.getKey(), e);
} catch (TimeoutException e) {
- LOGGER.log(Level.WARNING, "Timed out waiting to stop runtime: " + entry.getKey(), e);
+ LOGGER.log(Level.WARN, "Timed out waiting to stop runtime: " + entry.getKey(), e);
}
});
- LOGGER.warning("Shutdown ActiveManager on node " + nodeId + " complete");
+ LOGGER.warn("Shutdown ActiveManager on node " + nodeId + " complete");
}
private void stopRuntime(ActiveManagerMessage message) {
ActiveRuntimeId runtimeId = (ActiveRuntimeId) message.getPayload();
IActiveRuntime runtime = runtimes.get(runtimeId);
if (runtime == null) {
- LOGGER.warning("Request to stop runtime: " + runtimeId
+ LOGGER.warn("Request to stop runtime: " + runtimeId
+ " that is not registered. Could be that the runtime completed execution on"
+ " this node before the cluster controller sent the stop request");
} else {
@@ -168,7 +169,7 @@
stopIfRunning(runtimeId, runtime);
} catch (Exception e) {
// TODO(till) Figure out a better way to handle failure to stop a runtime
- LOGGER.log(Level.WARNING, "Failed to stop runtime: " + runtimeId, e);
+ LOGGER.log(Level.WARN, "Failed to stop runtime: " + runtimeId, e);
}
});
}
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveSourceOperatorNodePushable.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveSourceOperatorNodePushable.java
index 27ecb52..1df9020 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveSourceOperatorNodePushable.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveSourceOperatorNodePushable.java
@@ -18,9 +18,6 @@
*/
package org.apache.asterix.active;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.asterix.active.message.ActivePartitionMessage;
import org.apache.asterix.active.message.ActivePartitionMessage.Event;
import org.apache.asterix.common.api.INcApplicationContext;
@@ -29,11 +26,14 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public abstract class ActiveSourceOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable
implements IActiveRuntime {
- private static final Logger LOGGER = Logger.getLogger(ActiveSourceOperatorNodePushable.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected final IHyracksTaskContext ctx;
protected final ActiveManager activeManager;
/** A unique identifier for the runtime **/
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/DeployedJobService.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/DeployedJobService.java
index 070d838..b5b07ff 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/DeployedJobService.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/DeployedJobService.java
@@ -24,20 +24,21 @@
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.transaction.management.service.transaction.TxnIdFactory;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.job.DeployedJobSpecId;
import org.apache.hyracks.api.job.JobId;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* Provides functionality for running DeployedJobSpecs
*/
public class DeployedJobService {
- private static final Logger LOGGER = Logger.getLogger(DeployedJobService.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
//To enable new Asterix TxnId for separate deployed job spec invocations
private static final byte[] TRANSACTION_ID_PARAMETER_NAME = "TxnIdParameter".getBytes();
@@ -57,7 +58,7 @@
scheduledExecutorService.shutdown();
}
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Job Failed to run for " + entityId.getExtensionName() + " "
+ LOGGER.log(Level.ERROR, "Job Failed to run for " + entityId.getExtensionName() + " "
+ entityId.getDataverse() + "." + entityId.getEntityName() + ".", e);
}
}
@@ -68,8 +69,8 @@
public static boolean runRepetitiveDeployedJobSpec(DeployedJobSpecId distributedId, IHyracksClientConnection hcc,
Map<byte[], byte[]> jobParameters, long duration, EntityId entityId) throws Exception {
long executionMilliseconds = runDeployedJobSpec(distributedId, hcc, jobParameters, entityId);
- if (executionMilliseconds > duration && LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE,
+ if (executionMilliseconds > duration && LOGGER.isErrorEnabled()) {
+ LOGGER.log(Level.ERROR,
"Periodic job for " + entityId.getExtensionName() + " " + entityId.getDataverse() + "."
+ entityId.getEntityName() + " was unable to meet the required period of " + duration
+ " milliseconds. Actually took " + executionMilliseconds + " execution will shutdown"
diff --git a/asterixdb/asterix-algebra/pom.xml b/asterixdb/asterix-algebra/pom.xml
index d9dab35..6ec94ca 100644
--- a/asterixdb/asterix-algebra/pom.xml
+++ b/asterixdb/asterix-algebra/pom.xml
@@ -238,5 +238,9 @@
<groupId>org.apache.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
index f676bff..d1e7d5c 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
@@ -132,7 +132,7 @@
if (allClosed) {
expr.setFunctionInfo(
FunctionUtil.getFunctionInfo(BuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR));
- GlobalConfig.ASTERIX_LOGGER.finest("Switching to CLOSED record constructor in " + expr + ".\n");
+ GlobalConfig.ASTERIX_LOGGER.trace("Switching to CLOSED record constructor in " + expr + ".\n");
changed = true;
}
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
index 95f0de9..546652b 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
@@ -24,7 +24,6 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
-import java.util.logging.Logger;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.exceptions.CompilationException;
@@ -69,8 +68,6 @@
public class IntroduceLSMComponentFilterRule implements IAlgebraicRewriteRule {
- static final Logger LOGGER = Logger.getLogger(IntroduceLSMComponentFilterRule.class.getName());
-
protected IVariableTypeEnvironment typeEnvironment = null;
@Override
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
index 414d464..ae60c71 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
@@ -21,8 +21,6 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
import org.apache.asterix.common.cluster.IClusterStateManager;
@@ -41,6 +39,8 @@
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* Base class for language translators. Contains the common validation logic for language
@@ -48,7 +48,7 @@
*/
public abstract class AbstractLangTranslator {
- private static final Logger LOGGER = Logger.getLogger(AbstractLangTranslator.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public void validateOperation(ICcApplicationContext appCtx, Dataverse defaultDataverse, Statement stmt)
throws AlgebricksException {
@@ -63,8 +63,8 @@
} catch (HyracksDataException e) {
throw new AsterixException(e);
} catch (InterruptedException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Thread interrupted while waiting for cluster to be " + ClusterState.ACTIVE);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Thread interrupted while waiting for cluster to be " + ClusterState.ACTIVE);
}
Thread.currentThread().interrupt();
}
@@ -72,7 +72,7 @@
throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state."
+ "\n One or more Node Controllers have left or haven't joined yet.\n");
} else {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Cluster is now " + ClusterState.ACTIVE);
}
}
@@ -92,8 +92,8 @@
waitCycleCount++;
}
} catch (InterruptedException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Thread interrupted while waiting for cluster to complete global recovery ");
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Thread interrupted while waiting for cluster to complete global recovery ");
}
Thread.currentThread().interrupt();
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/SessionConfig.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/SessionConfig.java
index 6e67612..1160aaa 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/SessionConfig.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/SessionConfig.java
@@ -21,9 +21,9 @@
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Logger;
/**
* SessionConfig captures several different parameters for controlling
diff --git a/asterixdb/asterix-app/pom.xml b/asterixdb/asterix-app/pom.xml
index c250a2a..58bbad2 100644
--- a/asterixdb/asterix-app/pom.xml
+++ b/asterixdb/asterix-app/pom.xml
@@ -409,10 +409,6 @@
<artifactId>hyracks-storage-am-lsm-btree</artifactId>
</dependency>
<dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </dependency>
- <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<classifier>tests</classifier>
@@ -580,5 +576,13 @@
<artifactId>hyracks-comm</artifactId>
<version>${hyracks.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-core</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
index ea32e94..54dc064 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@ -26,13 +26,10 @@
import java.net.Inet4Address;
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.app.external.ExternalUDFLibrarian;
import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
import org.apache.asterix.common.api.INcApplicationContext;
-import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.config.PropertiesAccessor;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
@@ -52,10 +49,14 @@
import org.apache.hyracks.control.common.controllers.ControllerConfig;
import org.apache.hyracks.control.common.controllers.NCConfig;
import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.kohsuke.args4j.CmdLineException;
@SuppressWarnings({"squid:ClassVariableVisibilityCheck","squid:S00112"})
public class AsterixHyracksIntegrationUtil {
+
public static final int DEFAULT_HYRACKS_CC_CLIENT_PORT = 1098;
public static final int DEFAULT_HYRACKS_CC_CLUSTER_PORT = 1099;
public static final String DEFAULT_CONF_FILE = joinPath("asterixdb", "asterix-app", "src", "test", "resources",
@@ -91,7 +92,7 @@
integrationUtil.run(Boolean.getBoolean("cleanup.start"), Boolean.getBoolean("cleanup.shutdown"),
System.getProperty("external.lib", ""), System.getProperty("conf.path", DEFAULT_CONF_FILE));
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Unexpected exception", e);
+ LOGGER.log(Level.WARN, "Unexpected exception", e);
System.exit(1);
}
}
@@ -141,7 +142,7 @@
try {
nc.start();
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ LOGGER.log(Level.ERROR, e.getMessage(), e);
}
}
};
@@ -313,7 +314,7 @@
try {
deinit(cleanupOnShutdown);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Unexpected exception on shutdown", e);
+ LOGGER.log(Level.WARN, "Unexpected exception on shutdown", e);
}
}
});
@@ -331,7 +332,7 @@
try {
deinit(cleanupOnShutdown);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Unexpected exception on shutdown", e);
+ LOGGER.log(Level.WARN, "Unexpected exception on shutdown", e);
}
}
});
@@ -347,7 +348,7 @@
}
static class LoggerHolder {
- static final Logger LOGGER = Logger.getLogger(AsterixHyracksIntegrationUtil.class.getName());
+ static final Logger LOGGER = LogManager.getLogger();
private LoggerHolder() {
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/AbstractQueryApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/AbstractQueryApiServlet.java
index 3912bd5..b8c737d 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/AbstractQueryApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/AbstractQueryApiServlet.java
@@ -24,8 +24,6 @@
import java.io.PrintWriter;
import java.util.UUID;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.app.result.ResultReader;
import org.apache.asterix.common.api.IApplicationContext;
@@ -36,9 +34,12 @@
import org.apache.hyracks.client.dataset.HyracksDataset;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.ipc.exceptions.IPCException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class AbstractQueryApiServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(AbstractQueryApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected final IApplicationContext appCtx;
public enum ResultFields {
@@ -106,7 +107,7 @@
try {
return doGetHyracksDataset();
} catch (IPCException e) {
- LOGGER.log(Level.WARNING, "Failed getting hyracks dataset connection. Resetting hyracks connection.", e);
+ LOGGER.log(Level.WARN, "Failed getting hyracks dataset connection. Resetting hyracks connection.", e);
ctx.put(HYRACKS_CONNECTION_ATTR, appCtx.getHcc());
return doGetHyracksDataset();
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ActiveStatsApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ActiveStatsApiServlet.java
index a4889ce..eb22c28 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ActiveStatsApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ActiveStatsApiServlet.java
@@ -20,8 +20,6 @@
import java.io.PrintWriter;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.active.IActiveEntityEventsListener;
import org.apache.asterix.app.active.ActiveNotificationHandler;
@@ -30,6 +28,9 @@
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -38,7 +39,7 @@
public class ActiveStatsApiServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(ActiveStatsApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int DEFAULT_EXPIRE_TIME = 2000;
private final ActiveNotificationHandler activeNotificationHandler;
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java
index a29d869..87c1c57 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java
@@ -30,8 +30,6 @@
import java.io.PrintWriter;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import javax.imageio.ImageIO;
@@ -64,11 +62,14 @@
import org.apache.hyracks.http.server.utils.HttpUtil;
import org.apache.hyracks.http.server.utils.HttpUtil.ContentType;
import org.apache.hyracks.http.server.utils.HttpUtil.Encoding;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.handler.codec.http.HttpResponseStatus;
public class ApiServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(ApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final String HTML_STATEMENT_SEPARATOR = "<!-- BEGIN -->";
private final ICcApplicationContext appCtx;
@@ -129,7 +130,7 @@
response.setStatus(HttpResponseStatus.OK);
HttpUtil.setContentType(response, ContentType.TEXT_HTML, Encoding.UTF8);
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Failure setting content type", e);
+ LOGGER.log(Level.WARN, "Failure setting content type", e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
return;
}
@@ -174,7 +175,7 @@
GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.toString(), pe);
ResultUtil.webUIParseExceptionHandler(out, pe, query);
} catch (Exception e) {
- GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.ERROR, e.getMessage(), e);
ResultUtil.webUIErrorHandler(out, e);
}
}
@@ -188,7 +189,7 @@
try {
HttpUtil.setContentType(response, HttpUtil.ContentType.TEXT_HTML, HttpUtil.Encoding.UTF8);
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Failure setting content type", e);
+ LOGGER.log(Level.WARN, "Failure setting content type", e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
return;
}
@@ -217,7 +218,7 @@
HttpUtil.Encoding.UTF8);
writeOutput(response, is, resourcePath);
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Failure handling request", e);
+ LOGGER.log(Level.WARN, "Failure handling request", e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
return;
}
@@ -230,7 +231,7 @@
try {
line = br.readLine();
} catch (NullPointerException e) {
- LOGGER.log(Level.WARNING,
+ LOGGER.log(Level.WARN,
"NPE reading resource " + resourcePath + ", assuming JDK-8080094; returning 404", e);
// workaround lame JDK bug where a broken InputStream is returned in case the resourcePath is a
// directory; see https://bugs.openjdk.java.net/browse/JDK-8080094
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java
index 18e837a..38ee10c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java
@@ -24,8 +24,6 @@
import java.io.PrintWriter;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Predicate;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.api.config.IOption;
@@ -38,6 +36,9 @@
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
import org.apache.hyracks.util.JSONUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -47,7 +48,7 @@
public class ClusterApiServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(ClusterApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected static final String NODE_ID_KEY = "node_id";
protected static final String CONFIG_URI_KEY = "configUri";
protected static final String STATS_URI_KEY = "statsUri";
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterControllerDetailsApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterControllerDetailsApiServlet.java
index dcd43cb..82ade81 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterControllerDetailsApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterControllerDetailsApiServlet.java
@@ -23,14 +23,15 @@
import java.io.IOException;
import java.io.PrintWriter;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -38,7 +39,7 @@
public class ClusterControllerDetailsApiServlet extends ClusterApiServlet {
- private static final Logger LOGGER = Logger.getLogger(ClusterControllerDetailsApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public ClusterControllerDetailsApiServlet(ICcApplicationContext appCtx, ConcurrentMap<String, Object> ctx,
String... paths) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
index ba7ee12..5fe88f0 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
@@ -25,8 +25,6 @@
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.metadata.MetadataManager;
@@ -42,6 +40,9 @@
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
@@ -56,7 +57,7 @@
* in parallel from existing AsterixDB datasets.
*/
public class ConnectorApiServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(ConnectorApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private ICcApplicationContext appCtx;
public ConnectorApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx) {
@@ -70,7 +71,7 @@
try {
HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Failure setting content type", e);
+ LOGGER.log(Level.WARN, "Failure setting content type", e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
response.writer().write(e.toString());
return;
@@ -128,7 +129,7 @@
metadataProvider.getLocks().unlock();
}
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failure handling a request", e);
+ LOGGER.log(Level.WARN, "Failure handling a request", e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
out.write(e.toString());
} finally {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
index a79b137..f7f8385 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
@@ -31,8 +31,6 @@
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.cluster.IClusterStateManager;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
@@ -41,6 +39,9 @@
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.utils.HttpUtil;
import org.apache.hyracks.util.JSONUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -49,7 +50,7 @@
import io.netty.handler.codec.http.HttpResponseStatus;
public class DiagnosticsApiServlet extends NodeControllerDetailsApiServlet {
- private static final Logger LOGGER = Logger.getLogger(DiagnosticsApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected final IHyracksClientConnection hcc;
protected final ExecutorService executor;
@@ -138,7 +139,7 @@
try {
outputMap.put(entry.getKey(), entry.getValue().get());
} catch (ExecutionException e) {
- LOGGER.log(Level.WARNING, "unexpected exception obtaining value for " + entry.getKey(), e);
+ LOGGER.log(Level.WARN, "unexpected exception obtaining value for " + entry.getKey(), e);
errorMap.put(entry.getKey(), new TextNode(String.valueOf(e)));
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NCQueryCancellationServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NCQueryCancellationServlet.java
index 637c492..87beae1 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NCQueryCancellationServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NCQueryCancellationServlet.java
@@ -23,8 +23,6 @@
import java.io.IOException;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.app.message.CancelQueryRequest;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
@@ -32,6 +30,9 @@
import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.handler.codec.http.HttpResponseStatus;
@@ -39,7 +40,7 @@
* The servlet provides a REST API on an NC for cancelling an on-going query.
*/
public class NCQueryCancellationServlet extends QueryCancellationServlet {
- private static final Logger LOGGER = Logger.getLogger(NCQueryCancellationServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final INCServiceContext serviceCtx;
private final INCMessageBroker messageBroker;
@@ -65,9 +66,7 @@
cancelQueryFuture.get(DEFAULT_NC_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
response.setStatus(HttpResponseStatus.OK);
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, "Unexpected exception while canceling query", e);
- }
+ LOGGER.log(Level.ERROR, "Unexpected exception while canceling query", e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
} finally {
messageBroker.deregisterMessageFuture(cancelQueryFuture.getFutureId());
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NCQueryServiceServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NCQueryServiceServlet.java
index cd1064b..8a8342e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NCQueryServiceServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NCQueryServiceServlet.java
@@ -25,7 +25,6 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Function;
-import java.util.logging.Level;
import org.apache.asterix.algebra.base.ILangExtension;
import org.apache.asterix.app.message.CancelQueryRequest;
@@ -50,6 +49,7 @@
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.ipc.exceptions.IPCException;
+import org.apache.logging.log4j.Level;
import io.netty.handler.codec.http.HttpResponseStatus;
@@ -155,7 +155,7 @@
protected void handleExecuteStatementException(Throwable t, RequestExecutionState execution) {
if (t instanceof TimeoutException
|| (t instanceof HyracksDataException && ExceptionUtils.getRootCause(t) instanceof IPCException)) {
- GlobalConfig.ASTERIX_LOGGER.log(Level.WARNING, t.toString(), t);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.WARN, t.toString(), t);
execution.setStatus(ResultStatus.FAILED, HttpResponseStatus.SERVICE_UNAVAILABLE);
} else {
super.handleExecuteStatementException(t, execution);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NodeControllerDetailsApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NodeControllerDetailsApiServlet.java
index f443d09..8ca0947 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NodeControllerDetailsApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NodeControllerDetailsApiServlet.java
@@ -26,8 +26,6 @@
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.cluster.ClusterPartition;
import org.apache.asterix.common.cluster.IClusterStateManager;
@@ -36,6 +34,9 @@
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
@@ -45,7 +46,7 @@
public class NodeControllerDetailsApiServlet extends ClusterApiServlet {
- private static final Logger LOGGER = Logger.getLogger(NodeControllerDetailsApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public NodeControllerDetailsApiServlet(ICcApplicationContext appCtx, ConcurrentMap<String, Object> ctx,
String... paths) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryCancellationServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryCancellationServlet.java
index 96ff71c..6c94344 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryCancellationServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryCancellationServlet.java
@@ -20,8 +20,6 @@
import java.io.IOException;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.translator.IStatementExecutorContext;
import org.apache.hyracks.api.client.IHyracksClientConnection;
@@ -29,6 +27,9 @@
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.handler.codec.http.HttpResponseStatus;
@@ -36,7 +37,7 @@
* The servlet provides a REST API for cancelling an on-going query.
*/
public class QueryCancellationServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(QueryCancellationServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected static final String CLIENT_CONTEXT_ID = "client_context_id";
public QueryCancellationServlet(ConcurrentMap<String, Object> ctx, String... paths) {
@@ -71,7 +72,7 @@
// response: OK
response.setStatus(HttpResponseStatus.OK);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "unexpected exception thrown from cancel", e);
+ LOGGER.log(Level.WARN, "unexpected exception thrown from cancel", e);
// response: INTERNAL SERVER ERROR
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryResultApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryResultApiServlet.java
index cce5099..de96d54 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryResultApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryResultApiServlet.java
@@ -20,8 +20,6 @@
import java.io.PrintWriter;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.app.result.ResultHandle;
import org.apache.asterix.app.result.ResultReader;
@@ -35,11 +33,14 @@
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.handler.codec.http.HttpResponseStatus;
public class QueryResultApiServlet extends AbstractQueryApiServlet {
- private static final Logger LOGGER = Logger.getLogger(QueryResultApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public QueryResultApiServlet(ConcurrentMap<String, Object> ctx, IApplicationContext appCtx, String... paths) {
super(appCtx, ctx, paths);
@@ -104,13 +105,13 @@
}
response.setStatus(HttpResponseStatus.BAD_REQUEST);
out.println(e.getMessage());
- LOGGER.log(Level.WARNING, "Error retrieving result for \"" + strHandle + "\"", e);
+ LOGGER.log(Level.WARN, "Error retrieving result for \"" + strHandle + "\"", e);
} catch (Exception e) {
response.setStatus(HttpResponseStatus.BAD_REQUEST);
- LOGGER.log(Level.WARNING, "Error retrieving result for \"" + strHandle + "\"", e);
+ LOGGER.log(Level.WARN, "Error retrieving result for \"" + strHandle + "\"", e);
}
if (out.checkError()) {
- LOGGER.warning("Error flushing output writer for \"" + strHandle + "\"");
+ LOGGER.warn("Error flushing output writer for \"" + strHandle + "\"");
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java
index 0b6151f..71ed6b0 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java
@@ -24,8 +24,6 @@
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.algebra.base.ILangExtension;
import org.apache.asterix.common.api.Duration;
@@ -56,6 +54,9 @@
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.utils.HttpUtil;
import org.apache.hyracks.util.JSONUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -68,7 +69,7 @@
import io.netty.handler.codec.http.HttpResponseStatus;
public class QueryServiceServlet extends AbstractQueryApiServlet {
- protected static final Logger LOGGER = Logger.getLogger(QueryServiceServlet.class.getName());
+ protected static final Logger LOGGER = LogManager.getLogger();
protected final ILangExtension.Language queryLanguage;
private final ILangCompilationProvider compilationProvider;
private final IStatementExecutorFactory statementExecutorFactory;
@@ -98,10 +99,10 @@
} catch (IOException e) {
// Servlet methods should not throw exceptions
// http://cwe.mitre.org/data/definitions/600.html
- GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.ERROR, e.getMessage(), e);
} catch (Throwable th) {// NOSONAR: Logging and re-throwing
try {
- GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, th.getMessage(), th);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.ERROR, th.getMessage(), th);
} catch (Throwable ignored) { // NOSONAR: Logging failure
}
throw th;
@@ -363,7 +364,7 @@
param.timeout = getOptText(jsonRequest, Parameter.TIMEOUT.str());
} catch (JsonParseException | JsonMappingException e) {
// if the JSON parsing fails, the statement is empty and we get an empty statement error
- GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.ERROR, e.getMessage(), e);
}
} else {
param.statement = request.getParameter(Parameter.STATEMENT.str());
@@ -472,7 +473,7 @@
sessionOutput.out().print("}\n");
sessionOutput.out().flush();
if (sessionOutput.out().checkError()) {
- LOGGER.warning("Error flushing output writer");
+ LOGGER.warn("Error flushing output writer");
}
}
@@ -504,14 +505,14 @@
GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, t.getMessage(), t);
execution.setStatus(ResultStatus.FATAL, HttpResponseStatus.BAD_REQUEST);
} else if (t instanceof HyracksException) {
- GlobalConfig.ASTERIX_LOGGER.log(Level.WARNING, t.getMessage(), t);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.WARN, t.getMessage(), t);
if (((HyracksException) t).getErrorCode() == ErrorCode.QUERY_TIMEOUT) {
execution.setStatus(ResultStatus.TIMEOUT, HttpResponseStatus.OK);
} else {
execution.setStatus(ResultStatus.FATAL, HttpResponseStatus.INTERNAL_SERVER_ERROR);
}
} else {
- GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, "Unexpected exception", t);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.WARN, "Unexpected exception", t);
execution.setStatus(ResultStatus.FATAL, HttpResponseStatus.INTERNAL_SERVER_ERROR);
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryStatusApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryStatusApiServlet.java
index cec65f7..e55d82a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryStatusApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryStatusApiServlet.java
@@ -24,8 +24,6 @@
import java.io.StringWriter;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.app.result.ResultHandle;
import org.apache.asterix.app.result.ResultReader;
@@ -35,11 +33,14 @@
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.handler.codec.http.HttpResponseStatus;
public class QueryStatusApiServlet extends AbstractQueryApiServlet {
- private static final Logger LOGGER = Logger.getLogger(QueryStatusApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public QueryStatusApiServlet(ConcurrentMap<String, Object> ctx, IApplicationContext appCtx, String... paths) {
super(appCtx, ctx, paths);
@@ -91,7 +92,7 @@
response.setStatus(httpStatus);
response.writer().print(result);
if (response.writer().checkError()) {
- LOGGER.warning("Error flushing output writer");
+ LOGGER.warn("Error flushing output writer");
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryWebInterfaceServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryWebInterfaceServlet.java
index db39c5e..6680eb9 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryWebInterfaceServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryWebInterfaceServlet.java
@@ -21,8 +21,6 @@
import java.io.IOException;
import java.io.PrintWriter;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.config.ExternalProperties;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
@@ -30,13 +28,16 @@
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.StaticResourceServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.netty.handler.codec.http.HttpResponseStatus;
public class QueryWebInterfaceServlet extends StaticResourceServlet {
- private static final Logger LOGGER = Logger.getLogger(QueryWebInterfaceServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private ICcApplicationContext appCtx;
public QueryWebInterfaceServlet(ICcApplicationContext appCtx, ConcurrentMap<String, Object> ctx, String[] paths) {
@@ -67,12 +68,12 @@
out.println(obj.toString());
return;
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failure writing response", e);
+ LOGGER.log(Level.ERROR, "Failure writing response", e);
}
try {
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failure setting response status", e);
+ LOGGER.log(Level.ERROR, "Failure setting response status", e);
}
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
index 5bd3d3c..51f420c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
@@ -33,8 +33,6 @@
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.app.active.ActiveNotificationHandler;
import org.apache.asterix.common.api.IMetadataLockManager;
@@ -52,6 +50,9 @@
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -64,7 +65,7 @@
* - rebalance all non-metadata datasets.
*/
public class RebalanceApiServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(RebalanceApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final String METADATA = "Metadata";
private final ICcApplicationContext appCtx;
@@ -266,9 +267,9 @@
private void sendResponse(IServletResponse response, HttpResponseStatus status, String message, Exception e) {
if (status != HttpResponseStatus.OK) {
if (e != null) {
- LOGGER.log(Level.WARNING, message, e);
+ LOGGER.log(Level.WARN, message, e);
} else {
- LOGGER.log(Level.WARNING, message);
+ LOGGER.log(Level.WARN, message);
}
}
PrintWriter out = response.writer();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java
index 117d7fb..3359b9f 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java
@@ -24,8 +24,6 @@
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.app.result.ResultReader;
import org.apache.asterix.app.translator.QueryTranslator;
@@ -55,6 +53,9 @@
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -63,7 +64,7 @@
import io.netty.handler.codec.http.HttpResponseStatus;
public abstract class RestApiServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(RestApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ICcApplicationContext appCtx;
private final ILangCompilationProvider compilationProvider;
private final IParserFactory parserFactory;
@@ -180,7 +181,7 @@
doHandle(response, query, sessionOutput, resultDelivery);
} catch (Exception e) {
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
- LOGGER.log(Level.WARNING, "Failure handling request", e);
+ LOGGER.log(Level.WARN, "Failure handling request", e);
return;
}
}
@@ -212,13 +213,13 @@
translator.compileAndExecute(hcc, null, requestParameters);
} catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
- GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, pe.getMessage(), pe);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.ERROR, pe.getMessage(), pe);
String errorMessage = ResultUtil.buildParseExceptionMessage(pe, query);
ObjectNode errorResp =
ResultUtil.getErrorResponse(2, errorMessage, "", ResultUtil.extractFullStackTrace(pe));
sessionOutput.out().write(OBJECT_MAPPER.writeValueAsString(errorResp));
} catch (Exception e) {
- GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.ERROR, e.getMessage(), e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
ResultUtil.apiErrorHandler(sessionOutput.out(), e);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java
index ccbf68d..aace681 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java
@@ -45,14 +45,15 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.util.JSONUtil;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
public class ResultUtil {
- private static final Logger LOGGER = Logger.getLogger(ResultUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final List<Pair<Character, String>> HTML_ENTITIES = Collections.unmodifiableList(
Arrays.asList(Pair.of('&', "&"), Pair.of('"', """), Pair.of('<', "<"), Pair.of('>', ">"),
Pair.of('\'', "'")));
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ShutdownApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ShutdownApiServlet.java
index ac31e24..8c4f22d 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ShutdownApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ShutdownApiServlet.java
@@ -25,8 +25,6 @@
import java.io.PrintWriter;
import java.util.Date;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.cluster.IClusterStateManager;
import org.apache.asterix.common.config.GlobalConfig;
@@ -37,6 +35,9 @@
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
import org.apache.hyracks.util.JSONUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -44,7 +45,7 @@
import io.netty.handler.codec.http.HttpResponseStatus;
public class ShutdownApiServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(ShutdownApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final String NODE_ID_KEY = "node_id";
public static final String NCSERVICE_PID = "ncservice_pid";
public static final String INI = "ini";
@@ -65,14 +66,14 @@
try {
hcc.stopCluster(terminateNCServices);
} catch (Exception e) {
- GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, "Exception stopping cluster", e);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.ERROR, "Exception stopping cluster", e);
}
}, "Shutdown Servlet Worker");
try {
HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Failure handling request", e);
+ LOGGER.log(Level.WARN, "Failure handling request", e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
return;
}
@@ -94,7 +95,7 @@
nc.put(NCSERVICE_PID, details.get(INI).get(NCSERVICE_PID).asInt());
}
} else {
- LOGGER.warning("Unable to get node details for " + node + " from hcc");
+ LOGGER.warn("Unable to get node details for " + node + " from hcc");
}
}
jsonObject.set("cluster", clusterState);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/StorageApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/StorageApiServlet.java
index c2cda4e..e770bb3 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/StorageApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/StorageApiServlet.java
@@ -25,8 +25,6 @@
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Predicate;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.apache.asterix.common.api.INcApplicationContext;
@@ -39,6 +37,9 @@
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
import org.apache.hyracks.util.JSONUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
@@ -48,7 +49,7 @@
public class StorageApiServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(StorageApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final INcApplicationContext appCtx;
public StorageApiServlet(ConcurrentMap<String, Object> ctx, INcApplicationContext appCtx, String... paths) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
index 8d028a5..8b615cf 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
@@ -24,21 +24,22 @@
import java.io.PrintWriter;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.netty.handler.codec.http.HttpResponseStatus;
public class VersionApiServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(VersionApiServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public VersionApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
super(ctx, paths);
@@ -54,7 +55,7 @@
try {
HttpUtil.setContentType(response, HttpUtil.ContentType.TEXT_PLAIN, HttpUtil.Encoding.UTF8);
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Failure handling request", e);
+ LOGGER.log(Level.WARN, "Failure handling request", e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
return;
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveEntityEventsListener.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveEntityEventsListener.java
index bae04d5..8cbc109 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveEntityEventsListener.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveEntityEventsListener.java
@@ -25,8 +25,6 @@
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.active.ActiveEvent;
import org.apache.asterix.active.ActiveEvent.Kind;
@@ -57,10 +55,13 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobStatus;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public abstract class ActiveEntityEventsListener implements IActiveEntityController {
- private static final Logger LOGGER = Logger.getLogger(ActiveEntityEventsListener.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final Level level = Level.INFO;
private static final ActiveEvent STATE_CHANGED = new ActiveEvent(null, Kind.STATE_CHANGED, null, null);
private static final EnumSet<ActivityState> TRANSITION_STATES = EnumSet.of(ActivityState.RESUMING,
@@ -153,12 +154,12 @@
handle((ActivePartitionMessage) event.getEventObject());
break;
default:
- LOGGER.log(Level.FINE, "Unhandled feed event notification: " + event);
+ LOGGER.log(Level.DEBUG, "Unhandled feed event notification: " + event);
break;
}
notifySubscribers(event);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Unhandled Exception", e);
+ LOGGER.log(Level.ERROR, "Unhandled Exception", e);
}
}
@@ -181,7 +182,7 @@
protected void finish(ActiveEvent event) throws HyracksDataException {
LOGGER.log(level, "the job " + jobId + " finished");
if (numRegistered != numDeRegistered) {
- LOGGER.log(Level.WARNING, "the job " + jobId + " finished with reported runtime registrations = "
+ LOGGER.log(Level.WARN, "the job " + jobId + " finished with reported runtime registrations = "
+ numRegistered + " and deregistrations = " + numDeRegistered + " on node controllers");
}
jobId = null;
@@ -317,7 +318,7 @@
try {
subscriber.notify(event);
} catch (HyracksDataException e) {
- LOGGER.log(Level.WARNING, "Failed to notify subscriber", e);
+ LOGGER.log(Level.WARN, "Failed to notify subscriber", e);
}
if (subscriber.isDone()) {
it.remove();
@@ -379,7 +380,7 @@
setRunning(metadataProvider, true);
} catch (Exception e) {
setState(ActivityState.PERMANENTLY_FAILED);
- LOGGER.log(Level.SEVERE, "Failed to start the entity " + entityId, e);
+ LOGGER.log(Level.ERROR, "Failed to start the entity " + entityId, e);
throw HyracksDataException.create(e);
}
}
@@ -411,7 +412,7 @@
try {
setRunning(metadataProvider, false);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failed to set the entity state as not running " + entityId, e);
+ LOGGER.log(Level.ERROR, "Failed to set the entity state as not running " + entityId, e);
throw HyracksDataException.create(e);
}
} else if (state == ActivityState.RUNNING) {
@@ -421,7 +422,7 @@
setRunning(metadataProvider, false);
} catch (Exception e) {
setState(ActivityState.PERMANENTLY_FAILED);
- LOGGER.log(Level.SEVERE, "Failed to stop the entity " + entityId, e);
+ LOGGER.log(Level.ERROR, "Failed to stop the entity " + entityId, e);
throw HyracksDataException.create(e);
}
} else {
@@ -470,7 +471,7 @@
subscriber.sync();
} catch (Exception e) {
synchronized (this) {
- LOGGER.log(Level.SEVERE, "Failure while waiting for " + entityId + " to become suspended", e);
+ LOGGER.log(Level.ERROR, "Failure while waiting for " + entityId + " to become suspended", e);
// failed to suspend
if (state == ActivityState.SUSPENDING) {
if (jobId != null) {
@@ -506,7 +507,7 @@
try {
rt.resumeOrRecover(metadataProvider);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failure while attempting to resume " + entityId, e);
+ LOGGER.log(Level.WARN, "Failure while attempting to resume " + entityId, e);
}
} finally {
suspended = false;
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveNotificationHandler.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveNotificationHandler.java
index ec0680f..da2c99a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveNotificationHandler.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveNotificationHandler.java
@@ -22,8 +22,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.active.ActiveEvent;
import org.apache.asterix.active.ActiveEvent.Kind;
@@ -48,11 +46,14 @@
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.api.job.JobStatus;
import org.apache.hyracks.api.util.SingleThreadEventProcessor;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ActiveNotificationHandler extends SingleThreadEventProcessor<ActiveEvent>
implements IActiveNotificationHandler, IJobLifecycleListener {
- private static final Logger LOGGER = Logger.getLogger(ActiveNotificationHandler.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final Level level = Level.INFO;
public static final String ACTIVE_ENTITY_PROPERTY_NAME = "ActiveJob";
private final Map<EntityId, IActiveEntityEventsListener> entityEventListeners;
@@ -83,7 +84,7 @@
listener.notify(event);
}
} else {
- LOGGER.log(Level.SEVERE, "Entity not found for received message for job " + event.getJobId());
+ LOGGER.log(Level.ERROR, "Entity not found for received message for job " + event.getJobId());
}
}
@@ -111,7 +112,7 @@
LOGGER.log(level, "Job was found to be: " + (found ? "Active" : "Inactive"));
if (entityEventListeners.containsKey(entityId)) {
if (jobId2EntityId.containsKey(jobId)) {
- LOGGER.severe("Job is already being monitored for job: " + jobId);
+ LOGGER.error("Job is already being monitored for job: " + jobId);
return;
}
LOGGER.log(level, "monitoring started for job id: " + jobId);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/RecoveryTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/RecoveryTask.java
index 0321ae4..ffef251 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/RecoveryTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/RecoveryTask.java
@@ -19,8 +19,6 @@
package org.apache.asterix.app.active;
import java.util.concurrent.Callable;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.active.ActivityState;
import org.apache.asterix.active.IRetryPolicy;
@@ -36,10 +34,13 @@
import org.apache.asterix.metadata.utils.MetadataLockUtil;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class RecoveryTask {
- private static final Logger LOGGER = Logger.getLogger(RecoveryTask.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final Level level = Level.INFO;
private final ActiveEntityEventsListener listener;
private volatile boolean cancelRecovery = false;
@@ -86,7 +87,7 @@
listener.setState(ActivityState.RUNNING);
}
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Attempt to resume " + listener.getEntityId() + " Failed", e);
+ LOGGER.log(Level.WARN, "Attempt to resume " + listener.getEntityId() + " Failed", e);
synchronized (listener) {
if (listener.getState() == ActivityState.RESUMING) {
// This will be the case if compilation failure
@@ -102,7 +103,7 @@
}
}
} else {
- LOGGER.log(Level.WARNING, "Submitting recovery task for " + listener.getEntityId());
+ LOGGER.log(Level.WARN, "Submitting recovery task for " + listener.getEntityId());
metadataProvider.getApplicationContext().getServiceContext().getControllerService().getExecutor()
.submit(() -> doRecover(retryPolicyFactory.create(listener)));
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
index 4271d55..b013c60 100755
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
@@ -27,8 +27,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;
@@ -50,10 +48,13 @@
import org.apache.asterix.metadata.entities.Library;
import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.runtime.formats.NonTaggedDataFormat;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ExternalLibraryUtils {
- private static final Logger LOGGER = Logger.getLogger(ExternalLibraryUtils.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final FilenameFilter nonHiddenFileNameFilter = (dir, name) -> !name.startsWith(".");
private ExternalLibraryUtils() {
@@ -210,7 +211,7 @@
// Add library
MetadataManager.INSTANCE.addLibrary(mdTxnCtx, new Library(dataverse, libraryName));
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Added library " + libraryName + " to Metadata");
}
@@ -249,13 +250,13 @@
args, function.getReturnType().trim(), function.getDefinition().trim(),
library.getLanguage().trim(), function.getFunctionType().trim());
MetadataManager.INSTANCE.addFunction(mdTxnCtx, f);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Installed function: " + libraryName + "#" + function.getName().trim());
}
}
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Installed functions in library :" + libraryName);
}
@@ -268,19 +269,19 @@
DatasourceAdapter dsa =
new DatasourceAdapter(aid, adapterFactoryClass, IDataSourceAdapter.AdapterType.EXTERNAL);
MetadataManager.INSTANCE.addAdapter(mdTxnCtx, dsa);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Installed adapter: " + adapterName);
}
}
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Installed adapters in library :" + libraryName);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, "Exception in installing library " + libraryName, e);
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.log(Level.ERROR, "Exception in installing library " + libraryName, e);
}
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
@@ -326,7 +327,7 @@
private static ClassLoader getLibraryClassLoader(String dataverse, String libraryName) throws Exception {
// Get a reference to the library directory
File installDir = getLibraryInstallDir();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Installing lirbary " + libraryName + " in dataverse " + dataverse + "."
+ " Install Directory: " + installDir.getAbsolutePath());
}
@@ -374,7 +375,7 @@
}
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
StringBuilder logMesg = new StringBuilder("Classpath for library " + libraryName + "\n");
for (URL url : urls) {
logMesg.append(url.getFile() + "\n");
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/CancelQueryRequest.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/CancelQueryRequest.java
index fb6ec37..7e76fc1 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/CancelQueryRequest.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/CancelQueryRequest.java
@@ -27,12 +27,13 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.control.cc.ClusterControllerService;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class CancelQueryRequest implements ICcAddressedMessage {
- private static final Logger LOGGER = Logger.getLogger(CancelQueryRequest.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final long serialVersionUID = 1L;
private final String nodeId;
private final long reqId;
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/ExecuteStatementRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/ExecuteStatementRequestMessage.java
index ed683dc..0b8c34c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/ExecuteStatementRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/ExecuteStatementRequestMessage.java
@@ -24,8 +24,6 @@
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.algebra.base.ILangExtension;
import org.apache.asterix.api.http.server.ResultUtil;
@@ -55,10 +53,13 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.control.cc.ClusterControllerService;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public final class ExecuteStatementRequestMessage implements ICcAddressedMessage {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(ExecuteStatementRequestMessage.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
//TODO: Make configurable: https://issues.apache.org/jira/browse/ASTERIXDB-2062
public static final long DEFAULT_NC_TIMEOUT_MILLIS = TimeUnit.MINUTES.toMillis(5);
//TODO: Make configurable: https://issues.apache.org/jira/browse/ASTERIXDB-2063
@@ -130,16 +131,16 @@
} catch (AlgebricksException | HyracksException | TokenMgrError
| org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
// we trust that "our" exceptions are serializable and have a comprehensible error message
- GlobalConfig.ASTERIX_LOGGER.log(Level.WARNING, pe.getMessage(), pe);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.WARN, pe.getMessage(), pe);
responseMsg.setError(pe);
} catch (Exception e) {
- GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, "Unexpected exception", e);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.ERROR, "Unexpected exception", e);
responseMsg.setError(new Exception(e.toString()));
}
try {
messageBroker.sendApplicationMessageToNC(responseMsg, requestNodeId);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, e.toString(), e);
+ LOGGER.log(Level.WARN, e.toString(), e);
}
}
@@ -162,7 +163,7 @@
try {
messageBroker.sendApplicationMessageToNC(responseMsg, requestNodeId);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, e.toString(), e);
+ LOGGER.log(Level.WARN, e.toString(), e);
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/IndexCheckpointManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/IndexCheckpointManager.java
index 446d04d..835de47 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/IndexCheckpointManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/IndexCheckpointManager.java
@@ -28,19 +28,19 @@
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.storage.IIndexCheckpointManager;
import org.apache.asterix.common.storage.IndexCheckpoint;
import org.apache.asterix.common.utils.StorageConstants;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.util.annotations.ThreadSafe;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
@ThreadSafe
public class IndexCheckpointManager implements IIndexCheckpointManager {
- private static final Logger LOGGER = Logger.getLogger(IndexCheckpointManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int HISTORY_CHECKPOINTS = 1;
private static final int MAX_CHECKPOINT_WRITE_ATTEMPTS = 5;
private static final FilenameFilter CHECKPOINT_FILE_FILTER =
@@ -56,7 +56,7 @@
public synchronized void init(long lsn) throws HyracksDataException {
final List<IndexCheckpoint> checkpoints = getCheckpoints();
if (!checkpoints.isEmpty()) {
- LOGGER.warning(() -> "Checkpoints found on initializing: " + indexPath);
+ LOGGER.warn(() -> "Checkpoints found on initializing: " + indexPath);
delete();
}
IndexCheckpoint firstCheckpoint = IndexCheckpoint.first(lsn);
@@ -130,7 +130,7 @@
try {
checkpoints.add(read(checkpointFile.toPath()));
} catch (IOException e) {
- LOGGER.log(Level.WARNING, e, () -> "Couldn't read index checkpoint file: " + e);
+ LOGGER.warn(() -> "Couldn't read index checkpoint file: " + checkpointFile, e);
}
}
}
@@ -154,7 +154,7 @@
if (i == MAX_CHECKPOINT_WRITE_ATTEMPTS) {
throw HyracksDataException.create(e);
}
- LOGGER.log(Level.WARNING, e, () -> "Filed to write checkpoint at: " + indexPath);
+                LOGGER.warn(() -> "Failed to write checkpoint at: " + indexPath, e);
int nextAttempt = i + 1;
LOGGER.info(() -> "Checkpoint write attempt " + nextAttempt + "/" + MAX_CHECKPOINT_WRITE_ATTEMPTS);
}
@@ -176,7 +176,7 @@
}
}
} catch (Exception e) {
- LOGGER.log(Level.WARNING, e, () -> "Couldn't delete history checkpoints at " + indexPath);
+ LOGGER.warn(() -> "Couldn't delete history checkpoints at " + indexPath, e);
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
index df3ca64..a0142d5 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
@@ -27,8 +27,6 @@
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.apache.asterix.active.ActiveManager;
@@ -106,9 +104,12 @@
import org.apache.hyracks.storage.common.file.FileMapManager;
import org.apache.hyracks.storage.common.file.ILocalResourceRepositoryFactory;
import org.apache.hyracks.storage.common.file.IResourceIdFactory;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class NCAppRuntimeContext implements INcApplicationContext {
- private static final Logger LOGGER = Logger.getLogger(NCAppRuntimeContext.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private ILSMMergePolicyFactory metadataMergePolicyFactory;
private final INCServiceContext ncServiceContext;
@@ -200,8 +201,8 @@
SystemState systemState = recoveryMgr.getSystemState();
if (initialRun || systemState == SystemState.PERMANENT_DATA_LOSS) {
//delete any storage data before the resource factory is initialized
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING,
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN,
"Deleting the storage dir. initialRun = " + initialRun + ", systemState = " + systemState);
}
localResourceRepository.deleteStorageData();
@@ -460,9 +461,7 @@
@Override
public void initializeMetadata(boolean newUniverse) throws Exception {
IAsterixStateProxy proxy;
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Bootstrapping metadata");
- }
+ LOGGER.info("Bootstrapping metadata");
MetadataNode.INSTANCE.initialize(this, ncExtensionManager.getMetadataTupleTranslatorProvider(),
ncExtensionManager.getMetadataExtensions());
@@ -478,10 +477,7 @@
MetadataBootstrap.startUniverse(getServiceContext(), newUniverse);
MetadataBootstrap.startDDLRecovery();
ncExtensionManager.initializeMetadata(getServiceContext());
-
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Metadata node bound");
- }
+ LOGGER.info("Metadata node bound");
}
@Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/RecoveryManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/RecoveryManager.java
index dc22342..f458688 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/RecoveryManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/RecoveryManager.java
@@ -39,8 +39,6 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.api.INcApplicationContext;
@@ -77,6 +75,8 @@
import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndex;
import org.apache.hyracks.storage.common.IIndex;
import org.apache.hyracks.storage.common.LocalResource;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Logger;
/**
* This is the Recovery Manager and is responsible for rolling back a
@@ -85,7 +85,7 @@
public class RecoveryManager implements IRecoveryManager, ILifeCycleComponent {
public static final boolean IS_DEBUG_MODE = false;
- private static final Logger LOGGER = Logger.getLogger(RecoveryManager.class.getName());
+ private static final Logger LOGGER = org.apache.logging.log4j.LogManager.getLogger();
private final ITransactionSubsystem txnSubsystem;
private final LogManager logMgr;
private final boolean replicationEnabled;
@@ -126,9 +126,7 @@
//The checkpoint file doesn't exist => Failure happened during NC initialization.
//Retry to initialize the NC by setting the state to PERMANENT_DATA_LOSS
state = SystemState.PERMANENT_DATA_LOSS;
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("The checkpoint file doesn't exist: systemState = PERMANENT_DATA_LOSS");
- }
+ LOGGER.info("The checkpoint file doesn't exist: systemState = PERMANENT_DATA_LOSS");
return state;
}
@@ -147,7 +145,7 @@
long readableSmallestLSN = logMgr.getReadableSmallestLSN();
if (logMgr.getAppendLSN() == readableSmallestLSN) {
if (checkpointObject.getMinMCTFirstLsn() != AbstractCheckpointManager.SHARP_CHECKPOINT_LSN) {
- LOGGER.warning("Some(or all) of transaction log files are lost.");
+ LOGGER.warn("Some(or all) of transaction log files are lost.");
//No choice but continuing when the log files are lost.
}
state = SystemState.HEALTHY;
@@ -164,9 +162,7 @@
@Override
public void startLocalRecovery(Set<Integer> partitions) throws IOException, ACIDException {
state = SystemState.RECOVERING;
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("starting recovery ...");
- }
+ LOGGER.info("starting recovery ...");
long readableSmallestLSN = logMgr.getReadableSmallestLSN();
Checkpoint checkpointObject = checkpointManager.getLatest();
@@ -344,7 +340,7 @@
* log record.
*******************************************************************/
if (localResource == null) {
- LOGGER.log(Level.WARNING, "resource was not found for resource id " + resourceId);
+ LOGGER.log(Level.WARN, "resource was not found for resource id " + resourceId);
logRecord = logReader.next();
continue;
}
@@ -514,19 +510,19 @@
throw new ACIDException(e);
}
long lastLSN = txnContext.getLastLSN();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("rollbacking transaction log records from " + firstLSN + " to " + lastLSN);
}
// check if the transaction actually wrote some logs.
if (firstLSN == TransactionManagementConstants.LogManagerConstants.TERMINAL_LSN || firstLSN > lastLSN) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("no need to roll back as there were no operations by the txn " + txnContext.getTxnId());
}
return;
}
// While reading log records from firstLsn to lastLsn, collect uncommitted txn's Lsns
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("collecting loser transaction's LSNs from " + firstLSN + " to " + lastLSN);
}
@@ -636,7 +632,7 @@
}
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("undone loser transaction's effect");
LOGGER.info("[RecoveryManager's rollback log count] update/entityCommit/undo:" + updateLogCount + "/"
+ entityCommitLogCount + "/" + undoCount);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/TransactionSubsystem.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/TransactionSubsystem.java
index 41a0c3b..cd9b617 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/TransactionSubsystem.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/TransactionSubsystem.java
@@ -19,8 +19,6 @@
package org.apache.asterix.app.nc;
import java.util.concurrent.Callable;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.config.ReplicationProperties;
import org.apache.asterix.common.config.TransactionProperties;
@@ -34,20 +32,21 @@
import org.apache.asterix.common.transactions.IRecoveryManager;
import org.apache.asterix.common.transactions.ITransactionManager;
import org.apache.asterix.common.transactions.ITransactionSubsystem;
-import org.apache.asterix.common.utils.StorageConstants;
import org.apache.asterix.transaction.management.service.locking.ConcurrentLockManager;
import org.apache.asterix.transaction.management.service.logging.LogManager;
import org.apache.asterix.transaction.management.service.logging.LogManagerWithReplication;
import org.apache.asterix.transaction.management.service.recovery.CheckpointManagerFactory;
import org.apache.asterix.transaction.management.service.transaction.TransactionManager;
import org.apache.hyracks.api.application.INCServiceContext;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Logger;
/**
* Provider for all the sub-systems (transaction/lock/log/recovery) managers.
* Users of transaction sub-systems must obtain them from the provider.
*/
public class TransactionSubsystem implements ITransactionSubsystem {
- private static final Logger LOGGER = Logger.getLogger(TransactionSubsystem.class.getName());
+ private static final Logger LOGGER = org.apache.logging.log4j.LogManager.getLogger();
private final String id;
private final ILogManager logManager;
private final ILockManager lockManager;
@@ -74,7 +73,7 @@
final boolean replicationEnabled = repProperties.isReplicationEnabled();
final CheckpointProperties checkpointProperties = new CheckpointProperties(txnProperties, id);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, "Checkpoint Properties: " + checkpointProperties);
}
checkpointManager = CheckpointManagerFactory.create(this, checkpointProperties, replicationEnabled);
@@ -144,7 +143,7 @@
* However, the thread doesn't start reporting the count until the entityCommitCount > 0.
*/
static class EntityCommitProfiler implements Callable<Boolean> {
- private static final Logger LOGGER = Logger.getLogger(EntityCommitProfiler.class.getName());
+ private static final Logger LOGGER = org.apache.logging.log4j.LogManager.getLogger();
private final long reportIntervalInMillisec;
private long lastEntityCommitCount;
private int reportIntervalInSeconds;
@@ -179,7 +178,7 @@
long currentTimeStamp = System.currentTimeMillis();
long currentEntityCommitCount = txnSubsystem.profilerEntityCommitLogCount;
- LOGGER.severe("EntityCommitProfiler ReportRound[" + reportRound + "], AbsoluteTimeStamp[" + currentTimeStamp
+ LOGGER.error("EntityCommitProfiler ReportRound[" + reportRound + "], AbsoluteTimeStamp[" + currentTimeStamp
+ "], ActualRelativeTimeStamp[" + (currentTimeStamp - startTimeStamp)
+ "], ExpectedRelativeTimeStamp[" + (reportIntervalInSeconds * reportRound) + "], IIPS["
+ ((currentEntityCommitCount - lastEntityCommitCount) / reportIntervalInSeconds) + "], IPS["
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/MigrateStorageResourcesTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/MigrateStorageResourcesTask.java
index 503b8de..bd64ea5 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/MigrateStorageResourcesTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/MigrateStorageResourcesTask.java
@@ -24,7 +24,6 @@
import java.nio.file.Paths;
import java.util.List;
import java.util.function.Predicate;
-import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -44,13 +43,15 @@
import org.apache.hyracks.api.service.IControllerService;
import org.apache.hyracks.storage.common.ILocalResourceRepository;
import org.apache.hyracks.storage.common.LocalResource;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* Migrates a legacy storage structure to the current one
*/
public class MigrateStorageResourcesTask implements INCLifecycleTask {
- private static final Logger LOGGER = Logger.getLogger(MigrateStorageResourcesTask.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final long serialVersionUID = 1L;
public static final int LEGACY_RESOURCES_TREE_DEPTH_FROM_STORAGE_ROOT = 5;
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartLifecycleComponentsTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartLifecycleComponentsTask.java
index 87d3414..7db473e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartLifecycleComponentsTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartLifecycleComponentsTask.java
@@ -20,8 +20,6 @@
import java.util.HashMap;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.asterix.common.api.INcApplicationContext;
@@ -32,10 +30,12 @@
import org.apache.hyracks.api.lifecycle.LifeCycleComponentManager;
import org.apache.hyracks.api.service.IControllerService;
import org.apache.hyracks.control.nc.application.NCServiceContext;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class StartLifecycleComponentsTask implements INCLifecycleTask {
- private static final Logger LOGGER = Logger.getLogger(StartLifecycleComponentsTask.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final long serialVersionUID = 1L;
@Override
@@ -43,19 +43,17 @@
INcApplicationContext applicationContext = (INcApplicationContext) cs.getApplicationContext();
NCServiceContext serviceCtx = (NCServiceContext) cs.getContext();
MetadataProperties metadataProperties = applicationContext.getMetadataProperties();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Starting lifecycle components");
- }
+ LOGGER.info("Starting lifecycle components");
Map<String, String> lifecycleMgmtConfiguration = new HashMap<>();
String dumpPathKey = LifeCycleComponentManager.Config.DUMP_PATH_KEY;
String dumpPath = metadataProperties.getCoredumpPath(serviceCtx.getNodeId());
lifecycleMgmtConfiguration.put(dumpPathKey, dumpPath);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Coredump directory for NC is: " + dumpPath);
}
ILifeCycleComponentManager lccm = serviceCtx.getLifeCycleComponentManager();
lccm.configure(lifecycleMgmtConfiguration);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Configured:" + lccm);
}
serviceCtx.setStateDumpHandler(new AsterixStateDumpHandler(serviceCtx.getNodeId(), lccm.getDumpPath(), lccm));
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
index c00b1b6..37b4d4f 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
@@ -26,8 +26,6 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.apache.asterix.app.nc.task.BindMetadataNodeTask;
@@ -54,7 +52,6 @@
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.asterix.common.cluster.ClusterPartition;
import org.apache.asterix.common.cluster.IClusterStateManager;
-import org.apache.asterix.common.config.ReplicationProperties;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
@@ -66,14 +63,16 @@
import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.util.FaultToleranceUtil;
-import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.application.ICCServiceContext;
import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class AutoFaultToleranceStrategy implements IFaultToleranceStrategy {
- private static final Logger LOGGER = Logger.getLogger(AutoFaultToleranceStrategy.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private long clusterRequestId = 0;
private Set<String> failedNodes = new HashSet<>();
@@ -159,7 +158,7 @@
if (partitionRecoveryPlan.size() == 0) {
//no active replicas were found for the failed node
- LOGGER.severe("Could not find active replicas for the partitions " + lostPartitions);
+ LOGGER.error("Could not find active replicas for the partitions " + lostPartitions);
return;
} else {
LOGGER.info("Partitions to recover: " + lostPartitions);
@@ -179,7 +178,7 @@
* has failed. When the failure notification arrives, we will send any pending request
* that belongs to the failed NC to a different active replica.
*/
- LOGGER.log(Level.WARNING, "Failed to send takeover request: " + takeoverRequest, e);
+ LOGGER.log(Level.WARN, "Failed to send takeover request: " + takeoverRequest, e);
}
});
}
@@ -224,7 +223,7 @@
}
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Prepared Failback plan: " + plan.toString());
}
@@ -291,7 +290,7 @@
messageBroker.sendApplicationMessageToNC(request, request.getNodeID());
plan.addPendingRequest(request);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failed to send failback request to: " + request.getNodeID(), e);
+ LOGGER.log(Level.WARN, "Failed to send failback request to: " + request.getNodeID(), e);
plan.notifyNodeFailure(request.getNodeID());
revertFailedFailbackPlanEffects();
break;
@@ -374,7 +373,7 @@
try {
messageBroker.sendApplicationMessageToNC(request, request.getNodeId());
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failed to send complete failback request to: " + request.getNodeId(), e);
+ LOGGER.log(Level.WARN, "Failed to send complete failback request to: " + request.getNodeId(), e);
notifyFailbackPlansNodeFailure(request.getNodeId());
revertFailedFailbackPlanEffects();
}
@@ -417,7 +416,7 @@
* has failed. When the failure notification arrives, a new NC will be assigned to
* the metadata partition and a new metadata node takeover request will be sent to it.
*/
- LOGGER.log(Level.WARNING,
+ LOGGER.log(Level.WARN,
"Failed to send metadata node takeover request to: " + metadataPartiton.getActiveNodeId(), e);
}
}
@@ -477,7 +476,7 @@
}
clusterManager.refreshState();
} else {
- LOGGER.log(Level.SEVERE, msg.getNodeId() + " failed to complete startup. ", msg.getException());
+ LOGGER.log(Level.ERROR, msg.getNodeId() + " failed to complete startup. ", msg.getException());
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/MetadataNodeFaultToleranceStrategy.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/MetadataNodeFaultToleranceStrategy.java
index 0080bbd..02174da 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/MetadataNodeFaultToleranceStrategy.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/MetadataNodeFaultToleranceStrategy.java
@@ -25,8 +25,6 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.apache.asterix.app.nc.task.BindMetadataNodeTask;
@@ -61,10 +59,13 @@
import org.apache.hyracks.api.application.ICCServiceContext;
import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class MetadataNodeFaultToleranceStrategy implements IFaultToleranceStrategy {
- private static final Logger LOGGER = Logger.getLogger(MetadataNodeFaultToleranceStrategy.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private IClusterStateManager clusterManager;
private String metadataNodeId;
private IReplicationStrategy replicationStrategy;
@@ -106,7 +107,7 @@
try {
messageBroker.sendApplicationMessageToNC(msg, replica.getId());
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failed sending an application message to an NC", e);
+ LOGGER.log(Level.WARN, "Failed sending an application message to an NC", e);
continue;
}
}
@@ -170,7 +171,7 @@
private synchronized void process(ReplayPartitionLogsResponseMessage msg) {
hotStandbyMetadataReplica.add(msg.getNodeId());
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Hot Standby Metadata Replicas: " + hotStandbyMetadataReplica);
}
}
@@ -211,7 +212,7 @@
}
clusterManager.refreshState();
} else {
- LOGGER.log(Level.SEVERE, msg.getNodeId() + " failed to complete startup. ", msg.getException());
+ LOGGER.log(Level.ERROR, msg.getNodeId() + " failed to complete startup. ", msg.getException());
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NoFaultToleranceStrategy.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NoFaultToleranceStrategy.java
index a273845..3c5442e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NoFaultToleranceStrategy.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NoFaultToleranceStrategy.java
@@ -23,8 +23,6 @@
import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.apache.asterix.app.nc.task.BindMetadataNodeTask;
@@ -50,10 +48,13 @@
import org.apache.hyracks.api.application.ICCServiceContext;
import org.apache.hyracks.api.client.NodeStatus;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class NoFaultToleranceStrategy implements IFaultToleranceStrategy {
- private static final Logger LOGGER = Logger.getLogger(NoFaultToleranceStrategy.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private IClusterStateManager clusterManager;
private String metadataNodeId;
private Set<String> pendingStartupCompletionNodes = new HashSet<>();
@@ -121,8 +122,8 @@
}
clusterManager.refreshState();
} else {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, msg.getNodeId() + " failed to complete startup. ", msg.getException());
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.log(Level.ERROR, msg.getNodeId() + " failed to complete startup. ", msg.getException());
}
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackRequestMessage.java
index feca7e8..ad4afd0 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackRequestMessage.java
@@ -20,8 +20,6 @@
import java.io.IOException;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
@@ -29,11 +27,14 @@
import org.apache.asterix.common.replication.IRemoteRecoveryManager;
import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class CompleteFailbackRequestMessage extends AbstractFailbackPlanMessage implements INcAddressedMessage {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(CompleteFailbackRequestMessage.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final Set<Integer> partitions;
private final String nodeId;
@@ -69,7 +70,7 @@
IRemoteRecoveryManager remoteRecoeryManager = appContext.getRemoteRecoveryManager();
remoteRecoeryManager.completeFailbackProcess();
} catch (IOException | InterruptedException e) {
- LOGGER.log(Level.SEVERE, "Failure during completion of failback process", e);
+ LOGGER.log(Level.ERROR, "Failure during completion of failback process", e);
hde = HyracksDataException.create(e);
} finally {
CompleteFailbackResponseMessage reponse =
@@ -77,7 +78,7 @@
try {
broker.sendMessageToCC(reponse);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failure sending message to CC", e);
+ LOGGER.log(Level.ERROR, "Failure sending message to CC", e);
hde = HyracksDataException.suppress(hde, e);
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/MetadataNodeRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/MetadataNodeRequestMessage.java
index bebd133..b0e1e06 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/MetadataNodeRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/MetadataNodeRequestMessage.java
@@ -18,19 +18,19 @@
*/
package org.apache.asterix.app.replication.message;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.asterix.common.replication.INCLifecycleMessage;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class MetadataNodeRequestMessage implements INCLifecycleMessage, INcAddressedMessage {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(MetadataNodeRequestMessage.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final boolean export;
public MetadataNodeRequestMessage(boolean export) {
@@ -49,7 +49,7 @@
appContext.unexportMetadataNodeStub();
}
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failed taking over metadata", e);
+ LOGGER.log(Level.ERROR, "Failed taking over metadata", e);
hde = HyracksDataException.create(e);
} finally {
MetadataNodeResponseMessage reponse =
@@ -57,7 +57,7 @@
try {
broker.sendMessageToCC(reponse);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failed taking over metadata", e);
+ LOGGER.log(Level.ERROR, "Failed taking over metadata", e);
hde = HyracksDataException.suppress(hde, e);
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackRequestMessage.java
index cefcf49..6b85050 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackRequestMessage.java
@@ -20,8 +20,6 @@
import java.rmi.RemoteException;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
@@ -29,12 +27,15 @@
import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
import org.apache.asterix.transaction.management.resource.PersistentLocalResourceRepository;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class PreparePartitionsFailbackRequestMessage extends AbstractFailbackPlanMessage
implements INcAddressedMessage {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(PreparePartitionsFailbackRequestMessage.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final Set<Integer> partitions;
private boolean releaseMetadataNode = false;
private final String nodeID;
@@ -86,7 +87,7 @@
try {
appContext.unexportMetadataNodeStub();
} catch (RemoteException e) {
- LOGGER.log(Level.SEVERE, "Failed unexporting metadata stub", e);
+ LOGGER.log(Level.ERROR, "Failed unexporting metadata stub", e);
throw HyracksDataException.create(e);
}
} else {
@@ -109,7 +110,7 @@
try {
broker.sendMessageToCC(reponse);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failed sending message to cc", e);
+ LOGGER.log(Level.ERROR, "Failed sending message to cc", e);
throw HyracksDataException.create(e);
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksRequestMessage.java
index 075c415..b60ef8b 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksRequestMessage.java
@@ -18,9 +18,6 @@
*/
package org.apache.asterix.app.replication.message;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
@@ -29,10 +26,13 @@
import org.apache.hyracks.api.client.NodeStatus;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class RegistrationTasksRequestMessage implements INCLifecycleMessage, ICcAddressedMessage {
- private static final Logger LOGGER = Logger.getLogger(RegistrationTasksRequestMessage.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final long serialVersionUID = 1L;
private final SystemState state;
private final String nodeId;
@@ -51,7 +51,7 @@
systemState);
((INCMessageBroker) cs.getContext().getMessageBroker()).sendMessageToCC(msg);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Unable to send RegistrationTasksRequestMessage to CC", e);
+ LOGGER.log(Level.ERROR, "Unable to send RegistrationTasksRequestMessage to CC", e);
throw HyracksDataException.create(e);
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksResponseMessage.java
index 13525e3..d4c2340 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksResponseMessage.java
@@ -19,8 +19,6 @@
package org.apache.asterix.app.replication.message;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.asterix.common.api.INcApplicationContext;
@@ -31,10 +29,13 @@
import org.apache.hyracks.api.service.IControllerService;
import org.apache.hyracks.control.nc.NCShutdownHook;
import org.apache.hyracks.util.ExitUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class RegistrationTasksResponseMessage implements INCLifecycleMessage, INcAddressedMessage {
- private static final Logger LOGGER = Logger.getLogger(RegistrationTasksResponseMessage.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final long serialVersionUID = 1L;
private final String nodeId;
private final List<INCLifecycleTask> tasks;
@@ -53,16 +54,16 @@
Throwable exception = null;
try {
for (INCLifecycleTask task : tasks) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, "Starting startup task: " + task);
}
task.perform(cs);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, "Completed startup task: " + task);
}
}
} catch (Throwable e) { //NOSONAR all startup failures should be reported to CC
- LOGGER.log(Level.SEVERE, "Failed during startup task", e);
+ LOGGER.log(Level.ERROR, "Failed during startup task", e);
success = false;
exception = e;
}
@@ -72,7 +73,7 @@
broker.sendMessageToCC(result);
} catch (Exception e) {
success = false;
- LOGGER.log(Level.SEVERE, "Failed sending message to cc", e);
+ LOGGER.log(Level.ERROR, "Failed sending message to cc", e);
}
} finally {
if (!success) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsRequestMessage.java
index c8e2479..beac2b5 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsRequestMessage.java
@@ -19,8 +19,6 @@
package org.apache.asterix.app.replication.message;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
@@ -28,10 +26,13 @@
import org.apache.asterix.common.replication.INCLifecycleMessage;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ReplayPartitionLogsRequestMessage implements INCLifecycleMessage, INcAddressedMessage {
- private static final Logger LOGGER = Logger.getLogger(ReplayPartitionLogsRequestMessage.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final long serialVersionUID = 1L;
private final Set<Integer> partitions;
@@ -50,7 +51,7 @@
try {
broker.sendMessageToCC(reponse);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failed sending message to cc", e);
+ LOGGER.log(Level.ERROR, "Failed sending message to cc", e);
throw HyracksDataException.create(e);
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsRequestMessage.java
index ea9ac55..86be516 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsRequestMessage.java
@@ -19,8 +19,6 @@
package org.apache.asterix.app.replication.message;
import java.io.IOException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.exceptions.ACIDException;
@@ -29,11 +27,14 @@
import org.apache.asterix.common.replication.INCLifecycleMessage;
import org.apache.asterix.common.replication.IRemoteRecoveryManager;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class TakeoverPartitionsRequestMessage implements INCLifecycleMessage, INcAddressedMessage {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(TakeoverPartitionsRequestMessage.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final Integer[] partitions;
private final long requestId;
private final String nodeId;
@@ -81,7 +82,7 @@
IRemoteRecoveryManager remoteRecoeryManager = appContext.getRemoteRecoveryManager();
remoteRecoeryManager.takeoverPartitons(partitions);
} catch (IOException | ACIDException e) {
- LOGGER.log(Level.SEVERE, "Failure taking over partitions", e);
+ LOGGER.log(Level.ERROR, "Failure taking over partitions", e);
hde = HyracksDataException.suppress(hde, e);
} finally {
//send response after takeover is completed
@@ -90,7 +91,7 @@
try {
broker.sendMessageToCC(reponse);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failure taking over partitions", e);
+ LOGGER.log(Level.ERROR, "Failure taking over partitions", e);
hde = HyracksDataException.suppress(hde, e);
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 5609605..c69f5dc 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -40,8 +40,6 @@
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ExecutorService;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.active.ActivityState;
import org.apache.asterix.active.EntityId;
@@ -210,6 +208,9 @@
import org.apache.hyracks.control.common.job.profiling.om.JobletProfile;
import org.apache.hyracks.control.common.job.profiling.om.TaskProfile;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/*
* Provides functionality for executing a batch of Query statements (queries included)
@@ -217,7 +218,7 @@
*/
public class QueryTranslator extends AbstractLangTranslator implements IStatementExecutor {
- private static final Logger LOGGER = Logger.getLogger(QueryTranslator.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final boolean IS_DEBUG_MODE = false;// true
protected final List<Statement> statements;
@@ -2061,7 +2062,7 @@
MetadataManager.INSTANCE.getFeed(mdTxnCtx, feedId.getDataverse(), feedId.getEntityName()));
runJob(hcc, spec);
MetadataManager.INSTANCE.dropFeed(mdTxnCtx, feed.getDataverseName(), feed.getFeedName());
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Removed feed " + feedId);
}
}
@@ -2458,7 +2459,7 @@
ResultUtil.printStatus(sessionOutput, AbstractQueryApiServlet.ResultStatus.FAILED);
ResultUtil.printError(sessionOutput.out(), e);
} else {
- GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE,
+ GlobalConfig.ASTERIX_LOGGER.log(Level.ERROR,
resultDelivery.name() + " job with id " + jobId.getValue() + " " + "failed", e);
}
} finally {
@@ -2887,7 +2888,7 @@
toDataset.getHints(), toDataset.getDatasetType(), idd, withRecord, false);
this.handleCreateDatasetStatement(metadataProvider, createToDataset, hcc, null);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, e.getMessage(), e);
+ LOGGER.log(Level.WARN, e.getMessage(), e);
throw new AlgebricksException("Error cleaning the result dataset. This should not happen.");
}
@@ -2941,7 +2942,7 @@
while ((line = in.readLine()) != null) {
LOGGER.info(line);
if (line.contains("Exception") || line.contains("Error")) {
- LOGGER.severe(line);
+ LOGGER.error(line);
if (line.contains("Connection refused")) {
throw new AlgebricksException(
"The connection to your Pregelix cluster was refused. Is it running? "
@@ -3045,7 +3046,7 @@
public static void abort(Exception rootE, Exception parentE, MetadataTransactionContext mdTxnCtx) {
try {
if (IS_DEBUG_MODE) {
- LOGGER.log(Level.SEVERE, rootE.getMessage(), rootE);
+ LOGGER.log(Level.ERROR, rootE.getMessage(), rootE);
}
if (mdTxnCtx != null) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
index 40d8996..670b2bd 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
@@ -30,8 +30,6 @@
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.api.http.ctx.StatementExecutorContext;
import org.apache.asterix.api.http.server.ActiveStatsApiServlet;
@@ -98,10 +96,14 @@
import org.apache.hyracks.http.api.IServlet;
import org.apache.hyracks.http.server.HttpServer;
import org.apache.hyracks.http.server.WebManager;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.config.Configurator;
public class CCApplication extends BaseCCApplication {
- private static final Logger LOGGER = Logger.getLogger(CCApplication.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static IAsterixStateProxy proxy;
protected ICCServiceContext ccServiceCtx;
protected CCExtensionManager ccExtensionManager;
@@ -130,9 +132,7 @@
configureLoggingLevel(ccServiceCtx.getAppConfig().getLoggingLevel(ExternalProperties.Option.LOG_LEVEL));
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Starting Asterix cluster controller");
- }
+ LOGGER.info("Starting Asterix cluster controller");
String strIP = ccServiceCtx.getCCContext().getClusterControllerInfo().getClientNetAddress();
int port = ccServiceCtx.getCCContext().getClusterControllerInfo().getClientNetPort();
@@ -191,7 +191,7 @@
protected void configureLoggingLevel(Level level) {
super.configureLoggingLevel(level);
LOGGER.info("Setting Asterix log level to " + level);
- Logger.getLogger("org.apache.asterix").setLevel(level);
+ Configurator.setLevel("org.apache.asterix", level);
}
protected List<AsterixExtension> getExtensions() {
@@ -206,9 +206,7 @@
@Override
public void stop() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Stopping Asterix cluster controller");
- }
+ LOGGER.info("Stopping Asterix cluster controller");
appCtx.getClusterStateManager().setState(SHUTTING_DOWN);
((ActiveNotificationHandler) appCtx.getActiveNotificationHandler()).stop();
AsterixStateProxy.unregisterRemoteObject();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
index 217d6e2..8c87a26 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
@@ -25,8 +25,6 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.IClusterManagementWork;
import org.apache.asterix.common.api.IClusterManagementWorkResponse;
@@ -41,10 +39,12 @@
import org.apache.hyracks.api.application.IClusterLifecycleListener;
import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ClusterLifecycleListener implements IClusterLifecycleListener {
- private static final Logger LOGGER = Logger.getLogger(ClusterLifecycleListener.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ICcApplicationContext appCtx;
private final LinkedBlockingQueue<Set<IClusterManagementWork>> workRequestQueue = new LinkedBlockingQueue<>();
private final ClusterWorkExecutor eventHandler;
@@ -54,15 +54,13 @@
this.appCtx = appCtx;
eventHandler = new ClusterWorkExecutor(workRequestQueue);
Thread t = new Thread(eventHandler);
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Starting cluster event handler");
- }
+ LOGGER.info("Starting cluster event handler");
t.start();
}
@Override
public void notifyNodeJoin(String nodeId, Map<IOption, Object> ncConfiguration) throws HyracksException {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("NC: " + nodeId + " joined");
}
IClusterStateManager csm = appCtx.getClusterStateManager();
@@ -81,7 +79,7 @@
@Override
public void notifyNodeFailure(Collection<String> deadNodeIds) throws HyracksException {
for (String deadNode : deadNodeIds) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("NC: " + deadNode + " left");
}
IClusterStateManager csm = appCtx.getClusterStateManager();
@@ -158,7 +156,7 @@
nodesToBeAddedForWork.add(addedNodes.get(i));
}
if (nodesToBeAddedForWork.isEmpty()) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Unable to satisfy request by " + w);
}
AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
index bcc1c60..f227730 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
@@ -21,16 +21,16 @@
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.IClusterManagementWork;
import org.apache.asterix.metadata.cluster.AddNodeWork;
import org.apache.asterix.metadata.cluster.RemoveNodeWork;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ClusterWorkExecutor implements Runnable {
- private static final Logger LOGGER = Logger.getLogger(ClusterWorkExecutor.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final LinkedBlockingQueue<Set<IClusterManagementWork>> inbox;
@@ -63,13 +63,13 @@
}
} catch (InterruptedException e) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.severe("interruped" + e.getMessage());
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.error("interrupted: " + e.getMessage());
}
throw new IllegalStateException(e);
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.severe("Unexpected exception in handling cluster event" + e.getMessage());
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.error("Unexpected exception in handling cluster event " + e.getMessage());
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
index d6854b12..8e753c8 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
@@ -22,8 +22,6 @@
import java.util.Collections;
import java.util.List;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.IClusterManagementWork;
import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
@@ -50,10 +48,13 @@
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.control.nc.NCShutdownHook;
import org.apache.hyracks.util.ExitUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class GlobalRecoveryManager implements IGlobalRecoveryManager {
- private static final Logger LOGGER = Logger.getLogger(GlobalRecoveryManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected final IStorageComponentProvider componentProvider;
protected final ICCServiceContext serviceCtx;
protected IHyracksClientConnection hcc;
@@ -97,7 +98,7 @@
try {
recover(appCtx);
} catch (HyracksDataException e) {
- LOGGER.log(Level.SEVERE, "Global recovery failed. Shutting down...", e);
+ LOGGER.log(Level.ERROR, "Global recovery failed. Shutting down...", e);
ExitUtil.exit(NCShutdownHook.FAILED_TO_RECOVER_EXIT_CODE);
}
});
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
index 63178e1..a05b2bb 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
@@ -22,8 +22,6 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.api.http.server.ServletConstants;
import org.apache.asterix.api.http.server.StorageApiServlet;
@@ -61,9 +59,13 @@
import org.apache.hyracks.control.nc.NodeControllerService;
import org.apache.hyracks.http.server.HttpServer;
import org.apache.hyracks.http.server.WebManager;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.config.Configurator;
public class NCApplication extends BaseNCApplication {
- private static final Logger LOGGER = Logger.getLogger(NCApplication.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected INCServiceContext ncServiceCtx;
private INcApplicationContext runtimeContext;
@@ -91,7 +93,7 @@
throw new IllegalArgumentException("Unrecognized argument(s): " + Arrays.toString(args));
}
nodeId = this.ncServiceCtx.getNodeId();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Starting Asterix node controller: " + nodeId);
}
configureLoggingLevel(ncServiceCtx.getAppConfig().getLoggingLevel(ExternalProperties.Option.LOG_LEVEL));
@@ -105,7 +107,7 @@
runtimeContext = new NCAppRuntimeContext(ncServiceCtx, getExtensions());
MetadataProperties metadataProperties = runtimeContext.getMetadataProperties();
if (!metadataProperties.getNodeNames().contains(this.ncServiceCtx.getNodeId())) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Substitute node joining : " + this.ncServiceCtx.getNodeId());
}
updateOnNodeJoin();
@@ -121,7 +123,7 @@
if (latestCheckpoint != null) {
CompatibilityUtil.ensureCompatibility(controllerService, latestCheckpoint.getStorageVersion());
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
LOGGER.info("System state: " + recoveryMgr.getSystemState());
LOGGER.info("Node ID: " + nodeId);
@@ -134,7 +136,7 @@
@Override
protected void configureLoggingLevel(Level level) {
super.configureLoggingLevel(level);
- Logger.getLogger("org.apache.asterix").setLevel(level);
+ Configurator.setLevel("org.apache.asterix", level);
}
protected void configureServers() throws Exception {
@@ -154,7 +156,7 @@
if (!stopInitiated) {
runtimeContext.setShuttingdown(true);
stopInitiated = true;
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Stopping Asterix node controller: " + nodeId);
}
@@ -167,7 +169,7 @@
ncServiceCtx.getLifeCycleComponentManager().stopAll(false);
runtimeContext.deinitialize();
} else {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Duplicate attempt to stop ignored: " + nodeId);
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/CCMessageBroker.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/CCMessageBroker.java
index 0eade41..8e8fb93 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/CCMessageBroker.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/CCMessageBroker.java
@@ -22,8 +22,6 @@
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.ErrorCode;
@@ -41,10 +39,13 @@
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.NodeControllerState;
import org.apache.hyracks.control.cc.cluster.INodeManager;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class CCMessageBroker implements ICCMessageBroker {
- private static final Logger LOGGER = Logger.getLogger(CCMessageBroker.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
private final Map<Long, MutablePair<MutableInt, MutablePair<ResponseState, Object>>> handles =
new ConcurrentHashMap<>();
@@ -58,7 +59,7 @@
@Override
public void receivedMessage(IMessage message, String nodeId) throws Exception {
ICcAddressedMessage msg = (ICcAddressedMessage) message;
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Received message: " + msg);
}
ICcApplicationContext appCtx = (ICcApplicationContext) ccs.getApplicationContext();
@@ -72,8 +73,8 @@
if (state != null) {
state.getNodeController().sendApplicationMessageToNC(JavaSerializationUtils.serialize(msg), null, nodeId);
} else {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Couldn't send message to unregistered node (" + nodeId + ")");
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Couldn't send message to unregistered node (" + nodeId + ")");
}
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/MessagingChannelInterfaceFactory.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/MessagingChannelInterfaceFactory.java
index a2a3460..dc006b7 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/MessagingChannelInterfaceFactory.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/MessagingChannelInterfaceFactory.java
@@ -20,8 +20,6 @@
import java.io.IOException;
import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.config.MessagingProperties;
import org.apache.asterix.common.memory.ConcurrentFramePool;
@@ -36,10 +34,13 @@
import org.apache.hyracks.api.comm.ICloseableBufferAcceptor;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.util.JavaSerializationUtils;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class MessagingChannelInterfaceFactory implements IChannelInterfaceFactory {
- private static final Logger LOGGER = Logger.getLogger(MessagingChannelInterfaceFactory.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final NCMessageBroker messageBroker;
private final ConcurrentFramePool messagingFramePool;
@@ -113,8 +114,8 @@
// Queue the received message and free the network IO thread
messageBroker.queueReceivedMessage(receivedMsg);
} catch (ClassNotFoundException | IOException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, e.getMessage(), e);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, e.getMessage(), e);
}
} finally {
recycle.accept(buffer);
@@ -143,8 +144,8 @@
try {
messagingFramePool.release(buffer);
} catch (HyracksDataException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, e.getMessage(), e);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, e.getMessage(), e);
}
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
index 33e89f0..08e406e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
@@ -22,8 +22,6 @@
import java.nio.ByteBuffer;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.config.MessagingProperties;
@@ -37,12 +35,15 @@
import org.apache.hyracks.api.messages.IMessage;
import org.apache.hyracks.api.util.JavaSerializationUtils;
import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.util.collection.LongObjectHashMap;
import io.netty.util.collection.LongObjectMap;
public class NCMessageBroker implements INCMessageBroker {
- private static final Logger LOGGER = Logger.getLogger(NCMessageBroker.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final NodeControllerService ncs;
private final INcApplicationContext appContext;
@@ -86,7 +87,7 @@
@Override
public void receivedMessage(IMessage message, String nodeId) throws Exception {
INcAddressedMessage absMessage = (INcAddressedMessage) message;
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Received message: " + absMessage);
}
absMessage.handle(appContext);
@@ -153,12 +154,12 @@
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.WARNING) && msg != null) {
- LOGGER.log(Level.WARNING, "Could not process message : "
+ if (LOGGER.isWarnEnabled() && msg != null) {
+ LOGGER.log(Level.WARN, "Could not process message : "
+ msg, e);
} else {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Could not process message", e);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "Could not process message", e);
}
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/util/FaultToleranceUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/util/FaultToleranceUtil.java
index 64e9810..ddd0967 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/util/FaultToleranceUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/util/FaultToleranceUtil.java
@@ -20,8 +20,6 @@
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.apache.asterix.common.cluster.IClusterStateManager;
@@ -33,10 +31,12 @@
import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.control.common.controllers.NCConfig;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class FaultToleranceUtil {
- private static final Logger LOGGER = Logger.getLogger(FaultToleranceUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private FaultToleranceUtil() {
throw new AssertionError();
@@ -63,9 +63,7 @@
try {
messageBroker.sendApplicationMessageToNC(msg, replica);
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Failed sending an application message to an NC", e);
- }
+ LOGGER.warn("Failed sending an application message to an NC", e);
}
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/CompatibilityUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/CompatibilityUtil.java
index 5d44fc9..fd9b1e8 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/CompatibilityUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/CompatibilityUtil.java
@@ -20,17 +20,18 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Logger;
import org.apache.asterix.app.nc.task.MigrateStorageResourcesTask;
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.asterix.common.utils.StorageConstants;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class CompatibilityUtil {
- private static final Logger LOGGER = Logger.getLogger(CompatibilityUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int MIN_COMPATIBLE_VERSION = 1;
private CompatibilityUtil() {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
index a04c994..f6a4aac 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
@@ -26,8 +26,6 @@
import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.IntStream;
import org.apache.asterix.active.IActiveEntityEventsListener;
@@ -62,12 +60,15 @@
import org.apache.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
import org.apache.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* A utility class for the rebalance operation.
*/
public class RebalanceUtil {
- private static final Logger LOGGER = Logger.getLogger(RebalanceUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private RebalanceUtil() {
@@ -173,7 +174,7 @@
work.run();
done = true;
} catch (InterruptedException e) {
- LOGGER.log(Level.WARNING, "Retry with attempt " + (++retryCount), e);
+ LOGGER.log(Level.WARN, "Retry with attempt " + (++retryCount), e);
interruptedException = e;
}
} while (!done);
diff --git a/asterixdb/asterix-app/src/main/resources/cc2.conf b/asterixdb/asterix-app/src/main/resources/cc2.conf
index 41beda7..b7e4bc3 100644
--- a/asterixdb/asterix-app/src/main/resources/cc2.conf
+++ b/asterixdb/asterix-app/src/main/resources/cc2.conf
@@ -46,7 +46,7 @@
heartbeat.period=2000
[common]
-log.level = WARNING
+log.level = WARN
compiler.framesize=32KB
compiler.sortmemory=320KB
compiler.groupmemory=160KB
diff --git a/asterixdb/asterix-app/src/main/resources/cc3.conf b/asterixdb/asterix-app/src/main/resources/cc3.conf
index 4c38081..d6e7a9d 100644
--- a/asterixdb/asterix-app/src/main/resources/cc3.conf
+++ b/asterixdb/asterix-app/src/main/resources/cc3.conf
@@ -46,7 +46,7 @@
heartbeat.period=2000
[common]
-log.level = WARNING
+log.level = WARN
compiler.framesize=32KB
compiler.sortmemory=320KB
compiler.groupmemory=160KB
diff --git a/asterixdb/asterix-app/src/main/resources/log4j2.xml b/asterixdb/asterix-app/src/main/resources/log4j2.xml
new file mode 100644
index 0000000..28c1a61
--- /dev/null
+++ b/asterixdb/asterix-app/src/main/resources/log4j2.xml
@@ -0,0 +1,30 @@
+<!--
+ ! Licensed to the Apache Software Foundation (ASF) under one
+ ! or more contributor license agreements. See the NOTICE file
+ ! distributed with this work for additional information
+ ! regarding copyright ownership. The ASF licenses this file
+ ! to you under the Apache License, Version 2.0 (the
+ ! "License"); you may not use this file except in compliance
+ ! with the License. You may obtain a copy of the License at
+ !
+ ! http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing,
+ ! software distributed under the License is distributed on an
+ ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ! KIND, either express or implied. See the License for the
+ ! specific language governing permissions and limitations
+ ! under the License.
+ !-->
+<Configuration status="INFO">
+ <Appenders>
+ <Console name="Console" target="SYSTEM_ERR">
+ <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
+ </Console>
+ </Appenders>
+ <Loggers>
+ <Root level="INFO">
+ <AppenderRef ref="Console"/>
+ </Root>
+ </Loggers>
+</Configuration>
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
index fddab14..23a3eda 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
@@ -25,8 +25,6 @@
import java.util.Collections;
import java.util.List;
import java.util.Map;
-import java.util.Optional;
-import java.util.logging.Logger;
import org.apache.asterix.app.external.ExternalUDFLibrarian;
import org.apache.asterix.app.nc.NCAppRuntimeContext;
@@ -67,7 +65,6 @@
import org.apache.asterix.transaction.management.opcallbacks.PrimaryIndexModificationOperationCallbackFactory;
import org.apache.asterix.transaction.management.runtime.CommitRuntime;
import org.apache.asterix.transaction.management.service.logging.LogReader;
-import org.apache.avro.generic.GenericData;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.runtime.base.IPushRuntime;
@@ -112,10 +109,12 @@
import org.apache.hyracks.storage.common.IStorageManager;
import org.apache.hyracks.test.support.TestUtils;
import org.apache.hyracks.util.file.FileUtil;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.mockito.Mockito;
public class TestNodeController {
- protected static final Logger LOGGER = Logger.getLogger(TestNodeController.class.getName());
+ protected static final Logger LOGGER = LogManager.getLogger();
protected static final String PATH_ACTUAL = "unittest" + File.separator;
protected static final String PATH_BASE = FileUtil.joinPath("src", "test", "resources", "nodetests");
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/TestNodeControllerActor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/TestNodeControllerActor.java
index a5afa0e..f849f08 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/TestNodeControllerActor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/TestNodeControllerActor.java
@@ -22,7 +22,6 @@
import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import java.util.logging.Logger;
import org.apache.asterix.active.ActiveEvent;
import org.apache.asterix.active.ActiveEvent.Kind;
@@ -32,9 +31,11 @@
import org.apache.asterix.active.message.ActivePartitionMessage.Event;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.hyracks.api.job.JobId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class TestNodeControllerActor extends Actor {
- private static final Logger LOGGER = Logger.getLogger(TestNodeControllerActor.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final String id;
private final TestClusterControllerActor clusterController;
@@ -62,7 +63,7 @@
if (registrations.remove(registration)) {
return registration.deregister();
} else {
- LOGGER.warning("Request to stop runtime: " + new ActiveRuntimeId(entityId, "Test", partition)
+ LOGGER.warn("Request to stop runtime: " + new ActiveRuntimeId(entityId, "Test", partition)
+ " that is not registered. Could be that the runtime completed execution on"
+ " this node before the cluster controller sent the stop request");
return new Action() {
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestCase.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestCase.java
index 7173103..cfd5373 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestCase.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestCase.java
@@ -58,11 +58,11 @@
try {
parser.parse();
} catch (Exception e) {
- GlobalConfig.ASTERIX_LOGGER.warning("Failed while testing file " + reader);
+ GlobalConfig.ASTERIX_LOGGER.warn("Failed while testing file " + reader);
StringWriter sw = new StringWriter();
PrintWriter writer = new PrintWriter(sw);
e.printStackTrace(writer);
- GlobalConfig.ASTERIX_LOGGER.warning(sw.toString());
+ GlobalConfig.ASTERIX_LOGGER.warn(sw.toString());
throw new ParseException("Parsing " + queryFile.toString());
}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/RebalanceCancellationTestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/RebalanceCancellationTestExecutor.java
index a63cb76..3318459 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/RebalanceCancellationTestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/RebalanceCancellationTestExecutor.java
@@ -28,7 +28,6 @@
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
-import java.util.logging.Level;
import org.apache.asterix.common.exceptions.ExceptionUtils;
import org.apache.asterix.common.utils.Servlets;
@@ -39,6 +38,7 @@
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.methods.RequestBuilder;
+import org.apache.logging.log4j.Level;
import org.junit.Assert;
public class RebalanceCancellationTestExecutor extends TestExecutor {
@@ -84,7 +84,7 @@
if (errorMsg == null || !errorMsg.contains("reference count = 1")) {
return e2;
}
- LOGGER.log(Level.WARNING, e2.toString(), e2);
+ LOGGER.log(Level.WARN, e2.toString(), e2);
}
} while (!done);
}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/ResultExtractor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/ResultExtractor.java
index b95a283..890667a 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/ResultExtractor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/ResultExtractor.java
@@ -25,10 +25,11 @@
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
-import java.util.logging.Logger;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.commons.io.IOUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.core.PrettyPrinter;
import com.fasterxml.jackson.databind.JsonNode;
@@ -77,7 +78,7 @@
}
}
- private static final Logger LOGGER = Logger.getLogger(ResultExtractor.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
public static InputStream extract(InputStream resultStream) throws Exception {
@@ -109,7 +110,7 @@
final PrettyPrinter singleLine = new SingleLinePrettyPrinter();
final ObjectNode result = OBJECT_MAPPER.readValue(resultStr, ObjectNode.class);
- LOGGER.fine("+++++++\n" + result + "\n+++++++\n");
+ LOGGER.debug("+++++++\n" + result + "\n+++++++\n");
// if we have errors field in the results, we will always return it
checkForErrors(result);
final StringBuilder resultBuilder = new StringBuilder();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
index eaed8a6..bb3316d 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
@@ -53,8 +53,6 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Predicate;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;
@@ -91,6 +89,9 @@
import org.apache.http.protocol.HttpContext;
import org.apache.http.util.EntityUtils;
import org.apache.hyracks.util.StorageUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.Assert;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -105,7 +106,7 @@
/*
* Static variables
*/
- protected static final Logger LOGGER = Logger.getLogger(TestExecutor.class.getName());
+ protected static final Logger LOGGER = LogManager.getLogger();
// see
// https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers/417184
private static final long MAX_URL_LENGTH = 2000l;
@@ -444,9 +445,9 @@
final File parentDir = actualFile.getParentFile();
if (!parentDir.isDirectory()) {
if (parentDir.exists()) {
- LOGGER.warning("Actual file parent \"" + parentDir + "\" exists but is not a directory");
+ LOGGER.warn("Actual file parent \"" + parentDir + "\" exists but is not a directory");
} else if (!parentDir.mkdirs()) {
- LOGGER.warning("Unable to create actual file parent dir: " + parentDir);
+ LOGGER.warn("Unable to create actual file parent dir: " + parentDir);
}
}
try (FileOutputStream out = new FileOutputStream(actualFile)) {
@@ -468,7 +469,7 @@
try {
return client.execute(method, getHttpContext());
} catch (Exception e) {
- GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.ERROR, e.getMessage(), e);
e.printStackTrace();
throw e;
}
@@ -491,7 +492,7 @@
result.get("stacktrace").asText() };
} catch (Exception e) {
// whoops, not JSON (e.g. 404) - just include the body
- GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, errorBody);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.ERROR, errorBody);
Exception failure = new Exception("HTTP operation failed:" + "\nSTATUS LINE: "
+ httpResponse.getStatusLine() + "\nERROR_BODY: " + errorBody);
failure.addSuppressed(e);
@@ -1210,7 +1211,7 @@
ctx.setType(ctx.getType().substring("poll".length()));
boolean expectedException = false;
Exception finalException = null;
- LOGGER.fine("polling for up to " + timeoutSecs + " seconds w/ " + retryDelaySecs + " second(s) delay");
+ LOGGER.debug("polling for up to " + timeoutSecs + " seconds w/ " + retryDelaySecs + " second(s) delay");
int responsesReceived = 0;
final ExecutorService executorService = Executors.newSingleThreadExecutor();
while (true) {
@@ -1257,7 +1258,7 @@
finalException = e;
break;
}
- LOGGER.fine("sleeping " + retryDelaySecs + " second(s) before polling again");
+ LOGGER.debug("sleeping " + retryDelaySecs + " second(s) before polling again");
TimeUnit.SECONDS.sleep(retryDelaySecs);
}
}
@@ -1504,7 +1505,7 @@
path = tokens[1];
}
URI uri = new URI("http", null, endpoint.getHostString(), endpoint.getPort(), path, query, null);
- LOGGER.fine("Created endpoint URI: " + uri);
+ LOGGER.debug("Created endpoint URI: " + uri);
return uri;
}
@@ -1549,7 +1550,7 @@
}
if (!toBeDropped.isEmpty()) {
badtestcases.add(testCase);
- LOGGER.warning(
+ LOGGER.warn(
"Last test left some garbage. Dropping dataverses: " + StringUtils.join(toBeDropped, ','));
StringBuilder dropStatement = new StringBuilder();
for (String dv : toBeDropped) {
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/jsonplan/JsonLogicalPlanTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/jsonplan/JsonLogicalPlanTest.java
index 52532d6..7965afb 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/jsonplan/JsonLogicalPlanTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/jsonplan/JsonLogicalPlanTest.java
@@ -1 +1 @@
-/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.test.jsonplan;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.logging.Logger;
import org.apache.asterix.api.common.AsterixHyracksIntegrationUtil;
import org.apache.asterix.api.java.AsterixJavaClient;
import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.compiler.provider.AqlCompilationProvider;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.compiler.provider.SqlppCompilationProvider;
import org.apache.asterix.external.util.ExternalDataConstants;
import org.apache.asterix.external.util.IdentitiyResolverFactory;
import org.apache.asterix.file.StorageComponentProvider;
import org.apache.asterix.test.base.AsterixTestHelper;
import org.apache.asterix.test.common.TestHelper;
import org.apache.asterix.test.runtime.HDFSCluster;
import org.apache.asterix.translator.IStatementExecutorFactory;
import org.apache.asterix.translator.SessionConfig.PlanFormat;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.internal.AssumptionViolatedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
@RunWith(Parameterized.class)
public class JsonLogicalPlanTest {

    // NOTE(review): this variant still uses java.util.logging while the codebase is
    // migrating to Log4j2 -- consider converting this logger as well.
    private static final Logger LOGGER =
            Logger.getLogger(org.apache.asterix.test.jsonplan.JsonLogicalPlanTest.class.getName());

    protected static final String SEPARATOR = File.separator;
    private static final String EXTENSION_AQL = "aql";
    private static final String EXTENSION_SQLPP = "sqlpp";
    private static final String EXTENSION_RESULT = "plan";
    private static final String FILENAME_IGNORE = "ignore.txt";
    private static final String FILENAME_ONLY = "only.txt";
    private static final String PATH_BASE =
            "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "optimizerts" + SEPARATOR;
    private static final String PATH_QUERIES = PATH_BASE + "queries" + SEPARATOR;
    protected static String PATH_ACTUAL = "target" + File.separator + "jplantest" + SEPARATOR;
    protected static boolean optimized = false;

    // Optional allow/deny lists controlling which queries the suite runs.
    private static final ArrayList<String> ignore = AsterixTestHelper.readTestListFile(FILENAME_IGNORE, PATH_BASE);
    private static final ArrayList<String> only = AsterixTestHelper.readTestListFile(FILENAME_ONLY, PATH_BASE);
    protected static final String TEST_CONFIG_FILE_NAME = "src/main/resources/cc.conf";
    private static final ILangCompilationProvider aqlCompilationProvider = new AqlCompilationProvider();
    private static final ILangCompilationProvider sqlppCompilationProvider = new SqlppCompilationProvider();
    protected static ILangCompilationProvider extensionLangCompilationProvider = null;
    protected static IStatementExecutorFactory statementExecutorFactory = new DefaultStatementExecutorFactory();
    protected static IStorageComponentProvider storageComponentProvider = new StorageComponentProvider();
    protected static AsterixHyracksIntegrationUtil integrationUtil = new AsterixHyracksIntegrationUtil();

    /**
     * Boots the mini HDFS cluster and the Asterix integration cluster once for all
     * parameterized runs of this suite.
     */
    @BeforeClass
    public static void setUp() throws Exception {
        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
        final File outdir = new File(PATH_ACTUAL);
        outdir.mkdirs();
        HDFSCluster.getInstance().setup();
        integrationUtil.init(true, TEST_CONFIG_FILE_NAME);
        // Set the node resolver to be the identity resolver that expects node names
        // to be node controller ids; a valid assumption in test environment.
        System.setProperty(ExternalDataConstants.NODE_RESOLVER_FACTORY_PROPERTY,
                IdentitiyResolverFactory.class.getName());
    }

    /** Shuts down both clusters and removes the output directory if it is empty. */
    @AfterClass
    public static void tearDown() throws Exception {
        File outdir = new File(PATH_ACTUAL);
        File[] files = outdir.listFiles();
        if (files == null || files.length == 0) {
            outdir.delete();
        }
        HDFSCluster.getInstance().cleanup();
        integrationUtil.deinit(true);
    }

    /**
     * Recursively collects (queryFile, actualFile) pairs for every AQL/SQL++ query
     * under {@code file}, mirroring the source directory layout below PATH_ACTUAL.
     */
    private static void suiteBuildPerFile(File file, Collection<Object[]> testArgs, String path) {
        if (file.isDirectory() && !file.getName().startsWith(".")) {
            for (File innerfile : file.listFiles()) {
                String subdir = innerfile.isDirectory() ? path + innerfile.getName() + SEPARATOR : path;
                suiteBuildPerFile(innerfile, testArgs, subdir);
            }
        }
        if (file.isFile() && (file.getName().endsWith(EXTENSION_AQL) || file.getName().endsWith(EXTENSION_SQLPP))) {
            String resultFileName = AsterixTestHelper.extToResExt(file.getName(), EXTENSION_RESULT);
            File actualFile = new File(PATH_ACTUAL + SEPARATOR + path + resultFileName);
            testArgs.add(new Object[] { file, actualFile });
        }
    }

    @Parameters(name = "JsonLogicalPlanTest {index}: {0}")
    public static Collection<Object[]> tests() {
        Collection<Object[]> testArgs = new ArrayList<>();
        if (only.isEmpty()) {
            suiteBuildPerFile(new File(PATH_QUERIES), testArgs, "");
        } else {
            // A non-empty "only.txt" restricts the suite to the listed query prefixes.
            for (String path : only) {
                suiteBuildPerFile(new File(PATH_QUERIES + path), testArgs,
                        path.lastIndexOf(SEPARATOR) < 0 ? "" : path.substring(0, path.lastIndexOf(SEPARATOR) + 1));
            }
        }
        return testArgs;
    }

    private final File actualFile;
    private final File queryFile;

    public JsonLogicalPlanTest(final File queryFile, final File actualFile) {
        this.queryFile = queryFile;
        this.actualFile = actualFile;
    }

    /**
     * Compiles the query to a JSON logical plan and verifies that the first plan
     * section of the produced output parses as well-formed JSON.
     */
    @Test
    public void test() throws Exception {
        try {
            String queryFileShort =
                    queryFile.getPath().substring(PATH_QUERIES.length()).replace(SEPARATOR.charAt(0), '/');
            if (!only.isEmpty()) {
                boolean toRun = TestHelper.isInPrefixList(only, queryFileShort);
                if (!toRun) {
                    LOGGER.info("SKIP TEST: \"" + queryFile.getPath()
                            + "\" \"only.txt\" not empty and not in \"only.txt\".");
                }
                Assume.assumeTrue(toRun);
            }
            boolean skipped = TestHelper.isInPrefixList(ignore, queryFileShort);
            if (skipped) {
                LOGGER.info("SKIP TEST: \"" + queryFile.getPath() + "\" in \"ignore.txt\".");
            }
            Assume.assumeTrue(!skipped);
            LOGGER.info("RUN TEST: \"" + queryFile.getPath() + "\"");
            // Forces the creation of actualFile.
            actualFile.getParentFile().mkdirs();
            ILangCompilationProvider provider =
                    queryFile.getName().endsWith("aql") ? aqlCompilationProvider : sqlppCompilationProvider;
            if (extensionLangCompilationProvider != null) {
                provider = extensionLangCompilationProvider;
            }
            IHyracksClientConnection hcc = integrationUtil.getHyracksClientConnection();
            // try-with-resources guarantees reader and writer are closed even when an
            // unexpected exception escapes; the previous code leaked them in that case.
            try (Reader query = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
                    PrintWriter plan = new PrintWriter(actualFile)) {
                AsterixJavaClient asterix = new AsterixJavaClient(
                        (ICcApplicationContext) integrationUtil.cc.getApplicationContext(), hcc, query, plan, provider,
                        statementExecutorFactory, storageComponentProvider);
                try {
                    asterix.compile(true, false, !optimized, optimized, false, false, false, PlanFormat.JSON);
                } catch (AsterixException e) {
                    throw new Exception("Compile ERROR for " + queryFile + ": " + e.getMessage(), e);
                }
            }
            validateFirstPlanIsJson();
        } catch (Exception e) {
            if (!(e instanceof AssumptionViolatedException)) {
                LOGGER.severe("Test \"" + queryFile.getPath() + "\" FAILED!");
                throw new Exception("Test \"" + queryFile.getPath() + "\" FAILED!", e);
            } else {
                throw e;
            }
        }
    }

    /**
     * Reads the produced plan file, extracts the first plan section (delimited by
     * lines containing "--") and checks that it parses as JSON.
     */
    private void validateFirstPlanIsJson() throws Exception {
        // StringBuilder avoids the O(n^2) repeated String concatenation of the
        // previous implementation.
        StringBuilder objectActual = new StringBuilder();
        try (BufferedReader readerActual =
                new BufferedReader(new InputStreamReader(new FileInputStream(actualFile), "UTF-8"))) {
            String lineActual;
            boolean firstPlan = false;
            while ((lineActual = readerActual.readLine()) != null) {
                if (lineActual.contains("--")) {
                    if (firstPlan) {
                        break;
                    }
                    firstPlan = true;
                } else {
                    objectActual.append(lineActual);
                }
            }
        }
        // getFactory()/createParser() replace the deprecated Jackson 1.x-style
        // aliases getJsonFactory()/createJsonParser().
        final JsonParser parser = new ObjectMapper().getFactory().createParser(objectActual.toString());
        while (parser.nextToken() != null) {
            // Consume every token; a malformed plan raises a parse exception here.
        }
    }
}
\ No newline at end of file
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.test.jsonplan;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.asterix.api.common.AsterixHyracksIntegrationUtil;
import org.apache.asterix.api.java.AsterixJavaClient;
import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.compiler.provider.AqlCompilationProvider;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.compiler.provider.SqlppCompilationProvider;
import org.apache.asterix.external.util.ExternalDataConstants;
import org.apache.asterix.external.util.IdentitiyResolverFactory;
import org.apache.asterix.file.StorageComponentProvider;
import org.apache.asterix.test.base.AsterixTestHelper;
import org.apache.asterix.test.common.TestHelper;
import org.apache.asterix.test.runtime.HDFSCluster;
import org.apache.asterix.translator.IStatementExecutorFactory;
import org.apache.asterix.translator.SessionConfig.PlanFormat;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.internal.AssumptionViolatedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
@RunWith(Parameterized.class)
public class JsonLogicalPlanTest {

    private static final Logger LOGGER = LogManager.getLogger();

    protected static final String SEPARATOR = File.separator;
    private static final String EXTENSION_AQL = "aql";
    private static final String EXTENSION_SQLPP = "sqlpp";
    private static final String EXTENSION_RESULT = "plan";
    private static final String FILENAME_IGNORE = "ignore.txt";
    private static final String FILENAME_ONLY = "only.txt";
    private static final String PATH_BASE =
            "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "optimizerts" + SEPARATOR;
    private static final String PATH_QUERIES = PATH_BASE + "queries" + SEPARATOR;
    protected static String PATH_ACTUAL = "target" + File.separator + "jplantest" + SEPARATOR;
    protected static boolean optimized = false;

    // Optional allow/deny lists controlling which queries the suite runs.
    private static final ArrayList<String> ignore = AsterixTestHelper.readTestListFile(FILENAME_IGNORE, PATH_BASE);
    private static final ArrayList<String> only = AsterixTestHelper.readTestListFile(FILENAME_ONLY, PATH_BASE);
    protected static final String TEST_CONFIG_FILE_NAME = "src/main/resources/cc.conf";
    private static final ILangCompilationProvider aqlCompilationProvider = new AqlCompilationProvider();
    private static final ILangCompilationProvider sqlppCompilationProvider = new SqlppCompilationProvider();
    protected static ILangCompilationProvider extensionLangCompilationProvider = null;
    protected static IStatementExecutorFactory statementExecutorFactory = new DefaultStatementExecutorFactory();
    protected static IStorageComponentProvider storageComponentProvider = new StorageComponentProvider();
    protected static AsterixHyracksIntegrationUtil integrationUtil = new AsterixHyracksIntegrationUtil();

    /**
     * Boots the mini HDFS cluster and the Asterix integration cluster once for all
     * parameterized runs of this suite.
     */
    @BeforeClass
    public static void setUp() throws Exception {
        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
        final File outdir = new File(PATH_ACTUAL);
        outdir.mkdirs();
        HDFSCluster.getInstance().setup();
        integrationUtil.init(true, TEST_CONFIG_FILE_NAME);
        // Set the node resolver to be the identity resolver that expects node names
        // to be node controller ids; a valid assumption in test environment.
        System.setProperty(ExternalDataConstants.NODE_RESOLVER_FACTORY_PROPERTY,
                IdentitiyResolverFactory.class.getName());
    }

    /** Shuts down both clusters and removes the output directory if it is empty. */
    @AfterClass
    public static void tearDown() throws Exception {
        File outdir = new File(PATH_ACTUAL);
        File[] files = outdir.listFiles();
        if (files == null || files.length == 0) {
            outdir.delete();
        }
        HDFSCluster.getInstance().cleanup();
        integrationUtil.deinit(true);
    }

    /**
     * Recursively collects (queryFile, actualFile) pairs for every AQL/SQL++ query
     * under {@code file}, mirroring the source directory layout below PATH_ACTUAL.
     */
    private static void suiteBuildPerFile(File file, Collection<Object[]> testArgs, String path) {
        if (file.isDirectory() && !file.getName().startsWith(".")) {
            for (File innerfile : file.listFiles()) {
                String subdir = innerfile.isDirectory() ? path + innerfile.getName() + SEPARATOR : path;
                suiteBuildPerFile(innerfile, testArgs, subdir);
            }
        }
        if (file.isFile() && (file.getName().endsWith(EXTENSION_AQL) || file.getName().endsWith(EXTENSION_SQLPP))) {
            String resultFileName = AsterixTestHelper.extToResExt(file.getName(), EXTENSION_RESULT);
            File actualFile = new File(PATH_ACTUAL + SEPARATOR + path + resultFileName);
            testArgs.add(new Object[] { file, actualFile });
        }
    }

    @Parameters(name = "JsonLogicalPlanTest {index}: {0}")
    public static Collection<Object[]> tests() {
        Collection<Object[]> testArgs = new ArrayList<>();
        if (only.isEmpty()) {
            suiteBuildPerFile(new File(PATH_QUERIES), testArgs, "");
        } else {
            // A non-empty "only.txt" restricts the suite to the listed query prefixes.
            for (String path : only) {
                suiteBuildPerFile(new File(PATH_QUERIES + path), testArgs,
                        path.lastIndexOf(SEPARATOR) < 0 ? "" : path.substring(0, path.lastIndexOf(SEPARATOR) + 1));
            }
        }
        return testArgs;
    }

    private final File actualFile;
    private final File queryFile;

    public JsonLogicalPlanTest(final File queryFile, final File actualFile) {
        this.queryFile = queryFile;
        this.actualFile = actualFile;
    }

    /**
     * Compiles the query to a JSON logical plan and verifies that the first plan
     * section of the produced output parses as well-formed JSON.
     */
    @Test
    public void test() throws Exception {
        try {
            String queryFileShort =
                    queryFile.getPath().substring(PATH_QUERIES.length()).replace(SEPARATOR.charAt(0), '/');
            if (!only.isEmpty()) {
                boolean toRun = TestHelper.isInPrefixList(only, queryFileShort);
                if (!toRun) {
                    // Parameterized Log4j2 messages defer formatting until the level is enabled.
                    LOGGER.info("SKIP TEST: \"{}\" \"only.txt\" not empty and not in \"only.txt\".",
                            queryFile.getPath());
                }
                Assume.assumeTrue(toRun);
            }
            boolean skipped = TestHelper.isInPrefixList(ignore, queryFileShort);
            if (skipped) {
                LOGGER.info("SKIP TEST: \"{}\" in \"ignore.txt\".", queryFile.getPath());
            }
            Assume.assumeTrue(!skipped);
            LOGGER.info("RUN TEST: \"{}\"", queryFile.getPath());
            // Forces the creation of actualFile.
            actualFile.getParentFile().mkdirs();
            ILangCompilationProvider provider =
                    queryFile.getName().endsWith("aql") ? aqlCompilationProvider : sqlppCompilationProvider;
            if (extensionLangCompilationProvider != null) {
                provider = extensionLangCompilationProvider;
            }
            IHyracksClientConnection hcc = integrationUtil.getHyracksClientConnection();
            // try-with-resources guarantees reader and writer are closed even when an
            // unexpected exception escapes; the previous code leaked them in that case.
            try (Reader query = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
                    PrintWriter plan = new PrintWriter(actualFile)) {
                AsterixJavaClient asterix = new AsterixJavaClient(
                        (ICcApplicationContext) integrationUtil.cc.getApplicationContext(), hcc, query, plan, provider,
                        statementExecutorFactory, storageComponentProvider);
                try {
                    asterix.compile(true, false, !optimized, optimized, false, false, false, PlanFormat.JSON);
                } catch (AsterixException e) {
                    throw new Exception("Compile ERROR for " + queryFile + ": " + e.getMessage(), e);
                }
            }
            validateFirstPlanIsJson();
        } catch (Exception e) {
            if (!(e instanceof AssumptionViolatedException)) {
                LOGGER.error("Test \"{}\" FAILED!", queryFile.getPath());
                throw new Exception("Test \"" + queryFile.getPath() + "\" FAILED!", e);
            } else {
                throw e;
            }
        }
    }

    /**
     * Reads the produced plan file, extracts the first plan section (delimited by
     * lines containing "--") and checks that it parses as JSON.
     */
    private void validateFirstPlanIsJson() throws Exception {
        // StringBuilder avoids the O(n^2) repeated String concatenation of the
        // previous implementation.
        StringBuilder objectActual = new StringBuilder();
        try (BufferedReader readerActual =
                new BufferedReader(new InputStreamReader(new FileInputStream(actualFile), "UTF-8"))) {
            String lineActual;
            boolean firstPlan = false;
            while ((lineActual = readerActual.readLine()) != null) {
                if (lineActual.contains("--")) {
                    if (firstPlan) {
                        break;
                    }
                    firstPlan = true;
                } else {
                    objectActual.append(lineActual);
                }
            }
        }
        // getFactory()/createParser() replace the deprecated Jackson 1.x-style
        // aliases getJsonFactory()/createJsonParser().
        final JsonParser parser = new ObjectMapper().getFactory().createParser(objectActual.toString());
        while (parser.nextToken() != null) {
            // Consume every token; a malformed plan raises a parse exception here.
        }
    }
}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/jsonplan/JsonOptimizedLogicalPlanTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/jsonplan/JsonOptimizedLogicalPlanTest.java
index b8e4595..bc699c0 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/jsonplan/JsonOptimizedLogicalPlanTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/jsonplan/JsonOptimizedLogicalPlanTest.java
@@ -20,7 +20,6 @@
package org.apache.asterix.test.jsonplan;
import java.io.File;
-import java.util.logging.Logger;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -28,9 +27,6 @@
@RunWith(Parameterized.class)
public class JsonOptimizedLogicalPlanTest extends JsonLogicalPlanTest {
- private static final Logger LOGGER =
- Logger.getLogger(org.apache.asterix.test.jsonplan.JsonOptimizedLogicalPlanTest.class.getName());
-
public JsonOptimizedLogicalPlanTest(File queryFile, File actualFile) {
super(queryFile, actualFile);
optimized = true;
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
index 9b8afd3..8b228bd 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
@@ -26,7 +26,6 @@
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.logging.Logger;
import org.apache.asterix.api.common.AsterixHyracksIntegrationUtil;
import org.apache.asterix.api.java.AsterixJavaClient;
@@ -34,7 +33,6 @@
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
-import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.compiler.provider.AqlCompilationProvider;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.compiler.provider.SqlppCompilationProvider;
@@ -47,6 +45,8 @@
import org.apache.asterix.translator.IStatementExecutorFactory;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.BeforeClass;
@@ -59,7 +59,7 @@
@RunWith(Parameterized.class)
public class OptimizerTest {
- private static final Logger LOGGER = Logger.getLogger(OptimizerTest.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final String SEPARATOR = File.separator;
private static final String EXTENSION_AQL = "aql";
@@ -230,7 +230,7 @@
}
} catch (Exception e) {
if (!(e instanceof AssumptionViolatedException)) {
- LOGGER.severe("Test \"" + queryFile.getPath() + "\" FAILED!");
+ LOGGER.error("Test \"" + queryFile.getPath() + "\" FAILED!");
throw new Exception("Test \"" + queryFile.getPath() + "\" FAILED!", e);
} else {
throw e;
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
index b7ffcf2..c7ae2df 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
@@ -22,8 +22,6 @@
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.api.common.AsterixHyracksIntegrationUtil;
import org.apache.asterix.common.api.INcApplicationContext;
@@ -37,10 +35,12 @@
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ExecutionTestUtil {
- protected static final Logger LOGGER = Logger.getLogger(ExecutionTestUtil.class.getName());
+ protected static final Logger LOGGER = LogManager.getLogger();
protected static final String PATH_ACTUAL = "rttest" + File.separator;
@@ -60,11 +60,11 @@
AsterixHyracksIntegrationUtil alternateIntegrationUtil, boolean startHdfs, List<Pair<IOption, Object>> opts)
throws Exception {
System.out.println("Starting setup");
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Starting setup");
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("initializing pseudo cluster");
}
integrationUtil = alternateIntegrationUtil;
@@ -75,7 +75,7 @@
}
integrationUtil.init(cleanup, configFile);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("initializing HDFS");
}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
index 9d35d89..a89304a 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
@@ -31,8 +31,6 @@
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
/**
* Manages a Mini (local VM) HDFS cluster with a configured number of datanodes.
@@ -73,7 +71,6 @@
conf.addResource(new Path(basePath + PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
conf.addResource(new Path(basePath + PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
cleanupLocal();
- setLoggingLevel(Level.WARN);
conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, MINIDFS_BASEDIR);
MiniDFSCluster.Builder build = new MiniDFSCluster.Builder(conf);
build.nameNodePort(nameNodePort);
@@ -84,11 +81,6 @@
loadData(basePath);
}
- private void setLoggingLevel(Level level) {
- Logger rootLogger = Logger.getRootLogger();
- rootLogger.setLevel(level);
- }
-
private void loadData(String localDataRoot) throws IOException {
Path destDir = new Path(HDFS_PATH);
dfs.mkdirs(destDir);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/OptimizerParserTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/OptimizerParserTest.java
index 24a1872..35bcff5 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/OptimizerParserTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/OptimizerParserTest.java
@@ -21,10 +21,11 @@
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.logging.Logger;
import org.apache.asterix.test.base.AsterixTestHelper;
import org.apache.hyracks.util.file.FileUtil;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -35,7 +36,7 @@
@RunWith(Parameterized.class)
public class OptimizerParserTest {
- private static final Logger LOGGER = Logger.getLogger(OptimizerParserTest.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final String EXTENSION_QUERY = "sqlpp";
private static final String EXTENSION_RESULT = "ast";
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
index 493af30..d78cd94 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
@@ -152,7 +152,7 @@
runScriptAndCompareWithResult(queryFile, new PrintWriter(System.err), expectedFile, actualResultFile,
ComparisonEnum.TEXT);
} catch (Exception e) {
- GlobalConfig.ASTERIX_LOGGER.warning("Failed while testing file " + queryFile);
+ GlobalConfig.ASTERIX_LOGGER.warn("Failed while testing file " + queryFile);
throw e;
} finally {
writer.close();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestUtil.java
index 9c3c393..6bdf351 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestUtil.java
@@ -21,11 +21,11 @@
import java.io.File;
import java.util.Collection;
import java.util.List;
-import java.util.logging.Logger;
import org.apache.asterix.test.base.AsterixTestHelper;
import org.apache.asterix.test.common.TestHelper;
import org.apache.hyracks.util.file.FileUtil;
+import org.apache.logging.log4j.Logger;
import org.junit.Assume;
import org.junit.internal.AssumptionViolatedException;
@@ -74,7 +74,7 @@
} catch (Exception e) {
if (!(e instanceof AssumptionViolatedException)) {
final String msg = "Test \"" + queryFile.getPath() + "\" FAILED!";
- logger.severe(msg);
+ logger.error(msg);
throw new Exception(msg, e);
} else {
throw e;
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/RuntimeParserTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/RuntimeParserTest.java
index 0cac0c5..054da38 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/RuntimeParserTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/RuntimeParserTest.java
@@ -21,11 +21,12 @@
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.logging.Logger;
import org.apache.asterix.test.common.TestExecutor;
import org.apache.asterix.testframework.context.TestCaseContext;
import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -40,7 +41,7 @@
@RunWith(Parameterized.class)
public class RuntimeParserTest {
- protected static final Logger LOGGER = Logger.getLogger(RuntimeParserTest.class.getName());
+ protected static final Logger LOGGER = LogManager.getLogger();
protected static final String PATH_ACTUAL = "target" + File.separator + "runtime_parserts" + File.separator;
protected static final String PATH_BASE = StringUtils.join(new String[] { "src", "test", "resources", "runtimets" },
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/SmokeParserTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/SmokeParserTest.java
index b9b0cd6..b7a316d 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/SmokeParserTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/SmokeParserTest.java
@@ -21,10 +21,11 @@
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.logging.Logger;
import org.apache.asterix.test.base.AsterixTestHelper;
import org.apache.hyracks.util.file.FileUtil;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -35,7 +36,7 @@
@RunWith(Parameterized.class)
public class SmokeParserTest {
- private static final Logger LOGGER = Logger.getLogger(SmokeParserTest.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final String EXTENSION_QUERY = "sqlpp";
private static final String EXTENSION_RESULT = "ast";
diff --git a/asterixdb/asterix-app/src/test/resources/log4j2-test.xml b/asterixdb/asterix-app/src/test/resources/log4j2-test.xml
new file mode 100644
index 0000000..d17fad7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/log4j2-test.xml
@@ -0,0 +1,31 @@
+<!--
+ ! Licensed to the Apache Software Foundation (ASF) under one
+ ! or more contributor license agreements. See the NOTICE file
+ ! distributed with this work for additional information
+ ! regarding copyright ownership. The ASF licenses this file
+ ! to you under the Apache License, Version 2.0 (the
+ ! "License"); you may not use this file except in compliance
+ ! with the License. You may obtain a copy of the License at
+ !
+ ! http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing,
+ ! software distributed under the License is distributed on an
+ ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ! KIND, either express or implied. See the License for the
+ ! specific language governing permissions and limitations
+ ! under the License.
+ !-->
+<Configuration status="WARN">
+ <Appenders>
+ <Console name="Console" target="SYSTEM_ERR">
+ <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
+ </Console>
+ </Appenders>
+ <Loggers>
+ <Root level="WARN"/>
+ <Logger name="org.apache.asterix.test" level="WARN">
+ <AppenderRef ref="Console"/>
+ </Logger>
+ </Loggers>
+</Configuration>
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm
index 941c8bb..1160758 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.regexadm
@@ -13,7 +13,7 @@
"compiler\.sortmemory" : 327680,
"default\.dir" : "target/io/dir/asterixdb",
"instance\.name" : "DEFAULT_INSTANCE",
- "log\.level" : "WARNING",
+ "log\.level" : "WARN",
"max\.wait\.active\.cluster" : 60,
"messaging\.frame\.count" : 512,
"messaging\.frame\.size" : 4096,
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm
index bb495cd..d1c9fe0 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.regexadm
@@ -13,7 +13,7 @@
"compiler\.sortmemory" : 327680,
"default\.dir" : "target/io/dir/asterixdb",
"instance\.name" : "DEFAULT_INSTANCE",
- "log\.level" : "WARNING",
+ "log\.level" : "WARN",
"max\.wait\.active\.cluster" : 60,
"messaging\.frame\.count" : 512,
"messaging\.frame\.size" : 4096,
diff --git a/asterixdb/asterix-common/pom.xml b/asterixdb/asterix-common/pom.xml
index b909e91..8816bd0 100644
--- a/asterixdb/asterix-common/pom.xml
+++ b/asterixdb/asterix-common/pom.xml
@@ -240,10 +240,6 @@
<artifactId>hyracks-storage-am-lsm-rtree</artifactId>
</dependency>
<dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </dependency>
- <dependency>
<groupId>com.rometools</groupId>
<artifactId>rome</artifactId>
<scope>test</scope>
@@ -291,5 +287,9 @@
<groupId>args4j</groupId>
<artifactId>args4j</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
\ No newline at end of file
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java
index e718903..4172a72 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java
@@ -25,6 +25,7 @@
import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.config.IOptionType;
import org.apache.hyracks.api.config.Section;
+import org.apache.logging.log4j.Level;
public class ExternalProperties extends AbstractProperties {
@@ -34,7 +35,7 @@
API_PORT(INTEGER, 19002, "The listen port of the API server"),
ACTIVE_PORT(INTEGER, 19003, "The listen port of the active server"),
NC_API_PORT(INTEGER, 19004, "The listen port of the node controller API server"),
- LOG_LEVEL(LEVEL, java.util.logging.Level.WARNING, "The logging level for master and slave processes"),
+ LOG_LEVEL(LEVEL, Level.WARN, "The logging level for master and slave processes"),
MAX_WAIT_ACTIVE_CLUSTER(INTEGER, 60, "The max pending time (in seconds) for cluster startup. After the " +
"threshold, if the cluster still is not up and running, it is considered unavailable"),
CC_JAVA_OPTS(STRING, "-Xmx1024m", "The JVM options passed to the cluster controller process by managix"),
@@ -107,7 +108,7 @@
return accessor.getInt(Option.ACTIVE_PORT);
}
- public java.util.logging.Level getLogLevel() {
+ public Level getLogLevel() {
return accessor.getLoggingLevel(Option.LOG_LEVEL);
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/GlobalConfig.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/GlobalConfig.java
index bc8fd5f..67d974b 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/GlobalConfig.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/GlobalConfig.java
@@ -20,14 +20,16 @@
import java.util.LinkedHashMap;
import java.util.Map;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class GlobalConfig {
public static final boolean DEBUG = true;
public static final String ASTERIX_LOGGER_NAME = "org.apache.asterix";
- public static final Logger ASTERIX_LOGGER = Logger.getLogger(ASTERIX_LOGGER_NAME);
+ public static final Logger ASTERIX_LOGGER = LogManager.getLogger(ASTERIX_LOGGER_NAME);
public static final String DEFAULT_CONFIG_FILE_NAME = "asterix-configuration.xml";
@@ -60,7 +62,7 @@
try {
fz = Integer.parseInt(frameSizeStr);
} catch (NumberFormatException nfe) {
- GlobalConfig.ASTERIX_LOGGER.warning("Wrong frame size size argument. Picking default value ("
+ GlobalConfig.ASTERIX_LOGGER.warn("Wrong frame size size argument. Picking default value ("
+ GlobalConfig.DEFAULT_FRAME_SIZE + ") instead.\n");
}
if (fz >= 0) {
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertiesAccessor.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertiesAccessor.java
index 150705e..9c002b5 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertiesAccessor.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertiesAccessor.java
@@ -35,8 +35,6 @@
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Predicate;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.cluster.ClusterPartition;
import org.apache.asterix.common.exceptions.AsterixException;
@@ -49,9 +47,11 @@
import org.apache.hyracks.api.config.Section;
import org.apache.hyracks.control.common.application.ConfigManagerApplicationConfig;
import org.apache.hyracks.control.common.config.ConfigManager;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class PropertiesAccessor implements IApplicationConfig {
- private static final Logger LOGGER = Logger.getLogger(PropertiesAccessor.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final Map<IApplicationConfig, PropertiesAccessor> instances = new ConcurrentHashMap<>();
private final Map<String, String[]> stores = new HashMap<>();
@@ -177,8 +177,8 @@
try {
return value == null ? defaultValue : interpreter.parse(value);
} catch (IllegalArgumentException e) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.severe("Invalid property value '" + value + "' for property '" + property + "'.\n" + "Default = "
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.error("Invalid property value '" + value + "' for property '" + property + "'.\n" + "Default = "
+ defaultValue);
}
throw e;
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetInfo.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetInfo.java
index f703c19..9d63818 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetInfo.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetInfo.java
@@ -22,14 +22,14 @@
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class DatasetInfo extends Info implements Comparable<DatasetInfo> {
- private static final Logger LOGGER = Logger.getLogger(DatasetInfo.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final Map<Long, IndexInfo> indexes;
private final int datasetID;
private int numActiveIOOps;
@@ -205,8 +205,8 @@
}
}
if (numActiveIOOps < 0) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.severe("Number of IO operations cannot be negative for dataset: " + this);
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.error("Number of IO operations cannot be negative for dataset: " + this);
}
throw new IllegalStateException("Number of IO operations cannot be negative");
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
index 6a1ebfb..5d3d125 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
@@ -25,8 +25,6 @@
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.api.IDatasetMemoryManager;
@@ -54,10 +52,12 @@
import org.apache.hyracks.storage.common.IIndex;
import org.apache.hyracks.storage.common.ILocalResourceRepository;
import org.apache.hyracks.storage.common.LocalResource;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class DatasetLifecycleManager implements IDatasetLifecycleManager, ILifeCycleComponent {
- private static final Logger LOGGER = Logger.getLogger(DatasetLifecycleManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final Map<Integer, DatasetResource> datasets = new ConcurrentHashMap<>();
private final StorageProperties storageProperties;
private final ILocalResourceRepository resourceRepository;
@@ -141,11 +141,11 @@
PrimaryIndexOperationTracker opTracker = dsr.getOpTracker();
if (iInfo.getReferenceCount() != 0 || (opTracker != null && opTracker.getNumActiveOperations() != 0)) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
+ if (LOGGER.isErrorEnabled()) {
final String logMsg = String.format(
"Failed to drop in-use index %s. Ref count (%d), Operation tracker active ops (%d)",
resourcePath, iInfo.getReferenceCount(), opTracker.getNumActiveOperations());
- LOGGER.severe(logMsg);
+ LOGGER.error(logMsg);
}
throw HyracksDataException.create(ErrorCode.CANNOT_DROP_IN_USE_INDEX,
StoragePathUtil.getIndexNameFromPath(resourcePath));
@@ -366,9 +366,7 @@
|| opTracker.isFlushLogCreated() || opTracker.isFlushOnExit())) {
long firstLSN = ioCallback.getFirstLSN();
if (firstLSN < targetLSN) {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Checkpoint flush dataset " + dsr.getDatasetID());
- }
+ LOGGER.info("Checkpoint flush dataset {}", dsr.getDatasetID());
opTracker.setFlushOnExit(true);
if (opTracker.getNumActiveOperations() == 0) {
// No Modify operations currently, we need to trigger the flush and we can do so safely
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetMemoryManager.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetMemoryManager.java
index 88f406e..e839d8c 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetMemoryManager.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetMemoryManager.java
@@ -20,17 +20,18 @@
import java.util.HashMap;
import java.util.Map;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.IDatasetMemoryManager;
import org.apache.asterix.common.config.StorageProperties;
import org.apache.asterix.common.metadata.MetadataIndexImmutableProperties;
import org.apache.hyracks.util.annotations.ThreadSafe;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
@ThreadSafe
public class DatasetMemoryManager implements IDatasetMemoryManager {
- private static final Logger LOGGER = Logger.getLogger(DatasetMemoryManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final Map<Integer, Long> allocatedMap = new HashMap<>();
private final Map<Integer, Long> reservedMap = new HashMap<>();
private long available;
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/MetadataException.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/MetadataException.java
index f04d19d..085f465 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/MetadataException.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/MetadataException.java
@@ -20,12 +20,14 @@
package org.apache.asterix.common.exceptions;
import java.io.Serializable;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class MetadataException extends CompilationException {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(MetadataException.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
@Deprecated
/**
@@ -69,7 +71,7 @@
return (MetadataException) cause;
}
if (cause instanceof InterruptedException && !Thread.currentThread().isInterrupted()) {
- LOGGER.log(Level.WARNING, "Wrapping an InterruptedException in " + MetadataException.class.getSimpleName()
+ LOGGER.log(Level.WARN, "Wrapping an InterruptedException in " + MetadataException.class.getSimpleName()
+ " and current thread is not interrupted", cause);
}
return new MetadataException(cause);
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/memory/ConcurrentFramePool.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/memory/ConcurrentFramePool.java
index d57a4fc..d522aa5 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/memory/ConcurrentFramePool.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/memory/ConcurrentFramePool.java
@@ -25,10 +25,11 @@
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ConcurrentFramePool {
private static final boolean DEBUG = false;
@@ -36,7 +37,7 @@
+ "multiple of the default frame size";
private static final String ERROR_LARGER_THAN_BUDGET_REQUEST = "The requested frame size"
+ " must not be greater than the allocated budget";
- private static final Logger LOGGER = Logger.getLogger(ConcurrentFramePool.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final String nodeId;
private final int budget;
private final int defaultFrameSize;
@@ -233,7 +234,7 @@
try {
frameAction.call(freeBuffer);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE,
+ LOGGER.log(Level.ERROR,
"Error while attempting to answer a subscription. Buffer will be reclaimed", e);
// TODO(amoudi): Add test cases and get rid of recursion
if (handedOut == handedOutBeforeCall) {
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/memory/FrameAction.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/memory/FrameAction.java
index 366125a..c41f960 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/memory/FrameAction.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/memory/FrameAction.java
@@ -19,11 +19,13 @@
package org.apache.asterix.common.memory;
import java.nio.ByteBuffer;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class FrameAction {
private static final boolean DEBUG = false;
- private static final Logger LOGGER = Logger.getLogger(FrameAction.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private ByteBuffer allocated;
private ByteBuffer frame;
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/ChainedDeclusteringReplicationStrategy.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/ChainedDeclusteringReplicationStrategy.java
index 1ef798c..6f66599 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/ChainedDeclusteringReplicationStrategy.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/ChainedDeclusteringReplicationStrategy.java
@@ -21,18 +21,18 @@
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.config.ReplicationProperties;
import org.apache.hyracks.api.config.IConfigManager;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.control.common.config.ConfigManager;
import org.apache.hyracks.control.common.controllers.NCConfig;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ChainedDeclusteringReplicationStrategy implements IReplicationStrategy {
- private static final Logger LOGGER = Logger.getLogger(ChainedDeclusteringReplicationStrategy.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private int replicationFactor;
private ReplicationProperties repProp;
private ConfigManager configManager;
@@ -49,8 +49,8 @@
int nodeIndex = repProp.getNodeIds().indexOf(nodeId);
if (nodeIndex == -1) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Could not find node " + nodeId + " in cluster configurations");
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Could not find node " + nodeId + " in cluster configurations");
}
return Collections.emptySet();
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/InvokeUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/InvokeUtil.java
index c7ac0f4..c718fca 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/InvokeUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/InvokeUtil.java
@@ -21,12 +21,14 @@
import java.io.IOException;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class InvokeUtil {
- private static final Logger LOGGER = Logger.getLogger(InvokeUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
/**
* Executes the passed interruptible, retrying if the operation is interrupted. Once the interruptible
@@ -136,7 +138,7 @@
}
} catch (Exception e) {
// ignore, retry after delay
- LOGGER.log(Level.FINE, "Ignoring exception on retryLoop attempt, will retry after delay", e);
+ LOGGER.log(Level.DEBUG, "Ignoring exception on retryLoop attempt, will retry after delay", e);
}
}
return false;
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
index d2c5ad7..e83a75d 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
@@ -33,12 +33,12 @@
import org.apache.hyracks.api.io.MappedFileSplit;
import org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider;
import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class StoragePathUtil {
- private static final Logger LOGGER = Logger.getLogger(StoragePathUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static Function<IndexPathElements, String> indexPathProvider;
private StoragePathUtil() {
@@ -126,7 +126,7 @@
if (!success) {
throw new HyracksDataException("Unable to create spill file " + fileName);
} else {
- if (LOGGER.isEnabledFor(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Created spill file " + file.getAbsolutePath());
}
}
diff --git a/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/client/FileFeedSocketAdapterClient.java b/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/client/FileFeedSocketAdapterClient.java
index 852173c..0e24b12 100644
--- a/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/client/FileFeedSocketAdapterClient.java
+++ b/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/client/FileFeedSocketAdapterClient.java
@@ -25,8 +25,10 @@
import java.net.Socket;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class FileFeedSocketAdapterClient implements ITestClient {
private final int port;
@@ -37,7 +39,7 @@
private int batchSize;
private int maxCount;
private OutputStream out = null;
- static final Logger LOGGER = Logger.getLogger(FileFeedSocketAdapterClient.class.getName());
+ static final Logger LOGGER = LogManager.getLogger();
// expected args: url, source-file-path, max-count, batch-size, wait
public FileFeedSocketAdapterClient(int port, String[] args) throws Exception {
@@ -61,7 +63,7 @@
try {
socket = new Socket(url, port);
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Problem in creating socket against host " + url + " on the port " + port, e);
+ LOGGER.log(Level.WARN, "Problem in creating socket against host " + url + " on the port " + port, e);
throw e;
}
@@ -78,7 +80,7 @@
}
out.write(b.array(), 0, b.limit());
recordCount++;
- LOGGER.log(Level.FINE, "One record filed into feed");
+ LOGGER.log(Level.DEBUG, "One record filed into feed");
if (recordCount == maxCount) {
break;
}
diff --git a/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/server/RSSFeedServlet.java b/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/server/RSSFeedServlet.java
index 2ebed83..639e036 100644
--- a/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/server/RSSFeedServlet.java
+++ b/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/server/RSSFeedServlet.java
@@ -25,13 +25,13 @@
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
import com.rometools.rome.feed.synd.SyndContent;
import com.rometools.rome.feed.synd.SyndContentImpl;
@@ -71,7 +71,7 @@
SyndFeedOutput output = new SyndFeedOutput();
output.output(feed, res.writer());
} catch (FeedException | ParseException ex) {
- GlobalConfig.ASTERIX_LOGGER.log(Level.WARNING, ex.getMessage(), ex);
+ GlobalConfig.ASTERIX_LOGGER.log(Level.WARN, ex.getMessage(), ex);
String msg = COULD_NOT_GENERATE_FEED_ERROR;
res.writer().print(msg);
res.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
diff --git a/asterixdb/asterix-external-data/pom.xml b/asterixdb/asterix-external-data/pom.xml
index 6eca5c2..2a92e4b 100644
--- a/asterixdb/asterix-external-data/pom.xml
+++ b/asterixdb/asterix-external-data/pom.xml
@@ -318,10 +318,6 @@
<artifactId>hyracks-storage-am-lsm-btree</artifactId>
</dependency>
<dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </dependency>
- <dependency>
<groupId>org.apache.hyracks</groupId>
<artifactId>algebricks-data</artifactId>
</dependency>
@@ -416,5 +412,9 @@
<artifactId>commons-collections4</artifactId>
<version>4.1</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
index 0a47788..1bb9c11 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
@@ -20,8 +20,6 @@
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.api.INcApplicationContext;
@@ -54,11 +52,14 @@
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileSplit;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterFactory {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(GenericAdapterFactory.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private IExternalDataSourceFactory dataSourceFactory;
private IDataParserFactory dataParserFactory;
private ARecordType recordType;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/CounterTimerTupleForwarder.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/CounterTimerTupleForwarder.java
index b5adb32..10815d9 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/CounterTimerTupleForwarder.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/CounterTimerTupleForwarder.java
@@ -21,8 +21,6 @@
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
@@ -35,13 +33,15 @@
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class CounterTimerTupleForwarder implements ITupleForwarder {
public static final String BATCH_SIZE = "batch-size";
public static final String BATCH_INTERVAL = "batch-interval";
- private static final Logger LOGGER = Logger.getLogger(CounterTimerTupleForwarder.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private FrameTupleAppender appender;
private IFrame frame;
@@ -105,7 +105,7 @@
private void addTupleToFrame(ArrayTupleBuilder tb) throws HyracksDataException {
if (tuplesInFrame == batchSize
|| !appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("flushing frame containg (" + tuplesInFrame + ") tuples");
}
FrameUtils.flushFrame(frame.getBuffer(), writer);
@@ -148,7 +148,7 @@
public void run() {
try {
if (tuplesInFrame > 0) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("TTL expired flushing frame (" + tuplesInFrame + ")");
}
synchronized (lock) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
index 824f51a..8026efe 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
@@ -20,8 +20,6 @@
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
@@ -34,6 +32,9 @@
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowController {
public static final String INCOMING_RECORDS_COUNT_FIELD_NAME = "incoming-records-count";
@@ -45,7 +46,7 @@
STOPPED
}
- private static final Logger LOGGER = Logger.getLogger(FeedRecordDataFlowController.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final IRecordDataParser<T> dataParser;
private final IRecordReader<T> recordReader;
protected final AtomicBoolean closed = new AtomicBoolean(false);
@@ -90,7 +91,7 @@
}
}
} catch (HyracksDataException e) {
- LOGGER.log(Level.WARNING, "Exception during ingestion", e);
+ LOGGER.log(Level.WARN, "Exception during ingestion", e);
//if interrupted while waiting for a new record, then it is safe to not fail forward
if (e.getComponent() == ErrorCode.ASTERIX
&& (e.getErrorCode() == ErrorCode.FEED_STOPPED_WHILE_WAITING_FOR_A_NEW_RECORD)) {
@@ -113,7 +114,7 @@
} catch (Exception e) {
failure = e;
tupleForwarder.fail();
- LOGGER.log(Level.WARNING, "Failure while operating a feed source", e);
+ LOGGER.log(Level.WARN, "Failure while operating a feed source", e);
} finally {
failure = finish(failure);
}
@@ -178,7 +179,7 @@
try {
recordReader.close();
} catch (Exception th) {
- LOGGER.log(Level.WARNING, "Failure during while operating a feed source", th);
+ LOGGER.log(Level.WARN, "Failure during while operating a feed source", th);
hde = HyracksDataException.suppress(hde, th);
}
try {
@@ -203,7 +204,7 @@
try {
dataParser.parse(record, tb.getDataOutput());
} catch (Exception e) {
- LOGGER.log(Level.WARNING, ExternalDataConstants.ERROR_PARSE_RECORD, e);
+ LOGGER.log(Level.WARN, ExternalDataConstants.ERROR_PARSE_RECORD, e);
feedLogManager.logRecord(record.toString(), ExternalDataConstants.ERROR_PARSE_RECORD);
// continue the outer loop
return false;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
index e780834..82251b6 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
@@ -26,11 +26,9 @@
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import org.apache.log4j.Logger;
public class RecordDataFlowController<T> extends AbstractDataFlowController {
- private static final Logger LOGGER = Logger.getLogger(RecordDataFlowController.class.getName());
protected final IRecordDataParser<T> dataParser;
protected final IRecordReader<? extends T> recordReader;
protected final int numOfTupleFields;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java
index a8c6056..2fda99f 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java
@@ -19,8 +19,6 @@
package org.apache.asterix.external.feed.dataflow;
import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.exceptions.IExceptionHandler;
import org.apache.asterix.external.util.FeedFrameUtil;
@@ -28,10 +26,13 @@
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class FeedExceptionHandler implements IExceptionHandler {
- private static Logger LOGGER = Logger.getLogger(FeedExceptionHandler.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
// TODO: Enable logging
private final IHyracksTaskContext ctx;
@@ -53,8 +54,8 @@
logExceptionCausingTuple(tupleIndex, th);
} catch (Exception ex) {
ex.addSuppressed(th);
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Unable to log exception causing tuple due to..." + ex.getMessage());
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Unable to log exception causing tuple due to..." + ex.getMessage());
}
}
// TODO: Improve removeBadTuple. Right now, it creates lots of objects
@@ -64,8 +65,8 @@
}
} catch (Exception exception) {
exception.printStackTrace();
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Unable to handle exception " + exception.getMessage());
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Unable to handle exception " + exception.getMessage());
}
return null;
}
@@ -73,6 +74,6 @@
// TODO: Fix logging of exceptions
private void logExceptionCausingTuple(int tupleIndex, Throwable e) throws HyracksDataException {
- LOGGER.log(Level.WARNING, e.getMessage(), e);
+ LOGGER.log(Level.WARN, e.getMessage(), e);
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
index 90a8852..df5a3c4 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
@@ -21,8 +21,6 @@
import java.nio.ByteBuffer;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.active.ActiveRuntimeId;
import org.apache.asterix.common.memory.ConcurrentFramePool;
@@ -35,6 +33,9 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* TODO: Add Failure cases unit tests for this class
@@ -49,7 +50,7 @@
**/
public class FeedRuntimeInputHandler extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
- private static final Logger LOGGER = Logger.getLogger(FeedRuntimeInputHandler.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final double MAX_SPILL_USED_BEFORE_RESUME = 0.8;
private static final boolean DEBUG = false;
private static final ByteBuffer POISON_PILL = ByteBuffer.allocate(0);
@@ -109,7 +110,7 @@
try {
inbox.put(FAIL);
} catch (InterruptedException e) {
- LOGGER.log(Level.WARNING, "interrupted", e);
+ LOGGER.log(Level.WARN, "interrupted", e);
Thread.currentThread().interrupt();
}
}
@@ -124,7 +125,7 @@
inbox.put(POISON_PILL);
consumerThread.join();
} catch (InterruptedException e) {
- LOGGER.log(Level.WARNING, "interrupted", e);
+ LOGGER.log(Level.WARN, "interrupted", e);
Thread.currentThread().interrupt();
}
try {
@@ -132,7 +133,7 @@
spiller.close();
}
} catch (Throwable th) {
- LOGGER.log(Level.WARNING, "exception closing spiller", th);
+ LOGGER.log(Level.WARN, "exception closing spiller", th);
} finally {
writer.close();
}
@@ -159,8 +160,8 @@
discard(frame);
break;
default:
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Ignoring incoming tuples in " + mode + " mode");
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Ignoring incoming tuples in " + mode + " mode");
}
break;
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameSpiller.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameSpiller.java
index 09e03a3..e3630d3 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameSpiller.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameSpiller.java
@@ -32,7 +32,8 @@
import org.apache.hyracks.api.comm.VSizeFrame;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* A {@link FrameSpiller} is used with feeds when "spill.to.disk.on.congestion" is set to true. The spiller spills
@@ -42,7 +43,7 @@
* "max.spill.size.on.disk"
*/
public class FrameSpiller {
- private static final Logger LOGGER = Logger.getLogger(FrameSpiller.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int FRAMES_PER_FILE = 1024;
public static final double MAX_SPILL_USED_BEFORE_RESUME = 0.8;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/generator/TweetGenerator.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/generator/TweetGenerator.java
index 9431a24..17056a0 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/generator/TweetGenerator.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/generator/TweetGenerator.java
@@ -24,14 +24,14 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.external.generator.DataGenerator.TweetMessage;
import org.apache.asterix.external.generator.DataGenerator.TweetMessageIterator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class TweetGenerator {
- private static final Logger LOGGER = Logger.getLogger(TweetGenerator.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final String KEY_DURATION = "duration";
public static final String KEY_TPS = "tps";
@@ -105,7 +105,7 @@
if (outputBuffer.position() > 0) {
flush();
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Reached end of batch. Tweet Count: [" + partition + "]" + tweetCount);
}
return false;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/IndexingScheduler.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/IndexingScheduler.java
index 5212892..353e3ef 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/IndexingScheduler.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/IndexingScheduler.java
@@ -30,7 +30,6 @@
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Random;
-import java.util.logging.Logger;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.InputSplit;
@@ -39,10 +38,11 @@
import org.apache.hyracks.api.client.NodeControllerInfo;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.exceptions.HyracksException;
-import org.apache.hyracks.hdfs.scheduler.Scheduler;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class IndexingScheduler {
- private static final Logger LOGGER = Logger.getLogger(Scheduler.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
/** a list of NCs */
private String[] NCs;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/RCLookupReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/RCLookupReader.java
index 3f9d90e..c404b80 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/RCLookupReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/RCLookupReader.java
@@ -31,14 +31,15 @@
import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Writable;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class RCLookupReader extends AbstractHDFSLookupRecordReader<BytesRefArrayWritable> {
public RCLookupReader(ExternalFileIndexAccessor snapshotAccessor, FileSystem fs, Configuration conf) {
super(snapshotAccessor, fs, conf);
}
- private static final Logger LOGGER = Logger.getLogger(RCLookupReader.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private Reader reader;
private LongWritable key = new LongWritable();
private BytesRefArrayWritable value = new BytesRefArrayWritable();
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/SequenceLookupReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/SequenceLookupReader.java
index 23e647f..46ae9ac 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/SequenceLookupReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/SequenceLookupReader.java
@@ -30,7 +30,8 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class SequenceLookupReader extends AbstractCharRecordLookupReader {
@@ -38,7 +39,7 @@
super(snapshotAccessor, fs, conf);
}
- private static final Logger LOGGER = Logger.getLogger(SequenceLookupReader.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private Reader reader;
private Writable key;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/TextLookupReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/TextLookupReader.java
index 2e1a11a..1ddeedb 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/TextLookupReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/TextLookupReader.java
@@ -25,7 +25,8 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class TextLookupReader extends AbstractCharRecordLookupReader {
@@ -33,7 +34,7 @@
super(snapshotAccessor, fs, conf);
}
- private static final Logger LOGGER = Logger.getLogger(TextLookupReader.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private HDFSTextLineReader reader;
@Override
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
index 44da608..b847b23 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
@@ -30,7 +30,8 @@
import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
import org.apache.asterix.external.input.record.GenericRecord;
import org.apache.asterix.external.util.FeedLogManager;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.rometools.fetcher.FeedFetcher;
import com.rometools.fetcher.FetcherEvent;
@@ -45,7 +46,7 @@
public class RSSRecordReader implements IRecordReader<SyndEntry> {
- private static final Logger LOGGER = Logger.getLogger(RSSRecordReader.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private boolean modified = false;
private Queue<SyndEntry> rssFeedBuffer = new LinkedList<>();
private FeedFetcherCache feedInfoCache;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
index 3fb1d5f..8182dcd 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
@@ -22,8 +22,6 @@
import java.util.Collections;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
@@ -40,13 +38,15 @@
import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import twitter4j.FilterQuery;
public class TwitterRecordReaderFactory implements IRecordReaderFactory<String> {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(TwitterRecordReaderFactory.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final String DEFAULT_INTERVAL = "10"; // 10 seconds
private static final int INTAKE_CARDINALITY = 1; // degree of parallelism at intake stage
@@ -113,8 +113,8 @@
}
} else {
configuration.put(SearchAPIConstants.INTERVAL, DEFAULT_INTERVAL);
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning(" Parameter " + SearchAPIConstants.INTERVAL + " not defined, using default ("
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn(" Parameter " + SearchAPIConstants.INTERVAL + " not defined, using default ("
+ DEFAULT_INTERVAL + ")");
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStream.java
index c5ca129..36781fe 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStream.java
@@ -21,8 +21,6 @@
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.ExceptionUtils;
@@ -32,10 +30,13 @@
import org.apache.asterix.external.util.FeedLogManager;
import org.apache.asterix.external.util.FileSystemWatcher;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class LocalFSInputStream extends AsterixInputStream {
- private static final Logger LOGGER = Logger.getLogger(LocalFSInputStream.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final FileSystemWatcher watcher;
private FileInputStream in;
private byte lastByte;
@@ -163,18 +164,18 @@
try {
logManager.logRecord(currentFile.getAbsolutePath(), "Corrupted input file");
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Filed to write to feed log file", e);
+ LOGGER.log(Level.WARN, "Filed to write to feed log file", e);
}
- LOGGER.log(Level.WARNING, "Corrupted input file: " + currentFile.getAbsolutePath());
+ LOGGER.log(Level.WARN, "Corrupted input file: " + currentFile.getAbsolutePath());
}
try {
advance();
return true;
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "An exception was thrown while trying to skip a file", e);
+ LOGGER.log(Level.WARN, "An exception was thrown while trying to skip a file", e);
}
}
- LOGGER.log(Level.WARNING, "Failed to recover from failure", th);
+ LOGGER.log(Level.WARN, "Failed to recover from failure", th);
return false;
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketServerInputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketServerInputStream.java
index ea55b5b..b487b40 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketServerInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketServerInputStream.java
@@ -25,10 +25,11 @@
import org.apache.asterix.external.api.AsterixInputStream;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class SocketServerInputStream extends AsterixInputStream {
- private static final Logger LOGGER = Logger.getLogger(SocketServerInputStream.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private ServerSocket server;
private Socket socket;
private InputStream connectionStream;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStream.java
index 3928b19..0b9c4a2 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStream.java
@@ -25,16 +25,15 @@
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.external.api.AsterixInputStream;
import org.apache.asterix.external.generator.TweetGenerator;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class TwitterFirehoseInputStream extends AsterixInputStream {
- private static final Logger LOGGER = Logger.getLogger(TwitterFirehoseInputStream.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ExecutorService executorService;
private final PipedOutputStream outputStream;
private final PipedInputStream inputStream;
@@ -144,7 +143,7 @@
os.close();
break;
} catch (Exception e) {
- LOGGER.warning("Exception in adapter " + e.getMessage());
+ LOGGER.warn("Exception in adapter " + e.getMessage());
}
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
index d7afa13..6d91067 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
@@ -25,8 +25,6 @@
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
@@ -46,13 +44,16 @@
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.UnmanagedFileSplit;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class LocalFSInputStreamFactory implements IInputStreamFactory {
private static final long serialVersionUID = 1L;
protected static final INodeResolver DEFAULT_NODE_RESOLVER = new NodeResolverFactory().createNodeResolver();
- protected static final Logger LOGGER = Logger.getLogger(LocalFSInputStreamFactory.class.getName());
+ protected static final Logger LOGGER = LogManager.getLogger();
protected static INodeResolver nodeResolver;
protected Map<String, String> configuration;
protected UnmanagedFileSplit[] inputFileSplits;
@@ -155,8 +156,8 @@
.createNodeResolver();
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Unable to create node resolver from the configured classname "
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "Unable to create node resolver from the configured classname "
+ configuredNodeResolverFactory + "\n" + e.getMessage());
}
nodeResolver = DEFAULT_NODE_RESOLVER;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java
index ac474a5..8f35c80 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java
@@ -23,8 +23,6 @@
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
@@ -93,10 +91,13 @@
import org.apache.asterix.om.util.container.IObjectPool;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.util.string.UTF8StringReader;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class JObjectAccessors {
- private static final Logger LOGGER = Logger.getLogger(JObjectAccessors.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private JObjectAccessors() {
}
@@ -543,7 +544,7 @@
}
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failure while accessing a java record", e);
+ LOGGER.log(Level.WARN, "Failure while accessing a java record", e);
throw HyracksDataException.create(e);
}
return jRecord;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
index fe4f93c..337946b 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
@@ -29,12 +29,12 @@
import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ITwoPCIndex;
import org.apache.hyracks.storage.common.IIndex;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ExternalDatasetIndexesCommitOperatorDescriptor extends AbstractExternalDatasetIndexesOperatorDescriptor {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER =
- Logger.getLogger(ExternalDatasetIndexesCommitOperatorDescriptor.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public ExternalDatasetIndexesCommitOperatorDescriptor(IOperatorDescriptorRegistry spec,
List<IIndexDataflowHelperFactory> indexesDataflowHelperFactories) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
index 770e978..e7f8564 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
@@ -19,7 +19,6 @@
package org.apache.asterix.external.operators;
import java.util.Map;
-import java.util.logging.Logger;
import org.apache.asterix.active.EntityId;
import org.apache.asterix.common.api.INcApplicationContext;
@@ -37,6 +36,8 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* An operator responsible for establishing connection with external data source and parsing,
@@ -48,7 +49,7 @@
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(FeedIntakeOperatorDescriptor.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
/** The unique identifier of the feed that is being ingested. **/
private final EntityId feedId;
@@ -117,7 +118,7 @@
RuntimeDataException err = new RuntimeDataException(
ErrorCode.OPERATORS_FEED_INTAKE_OPERATOR_DESCRIPTOR_CLASSLOADER_NOT_CONFIGURED, adaptorLibraryName,
feedId.getDataverse());
- LOGGER.severe(err.getMessage());
+ LOGGER.error(err.getMessage());
throw err;
}
return adapterFactory;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
index 7907e69..e1c339c 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
@@ -18,9 +18,6 @@
*/
package org.apache.asterix.external.operators;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.asterix.active.ActiveRuntimeId;
import org.apache.asterix.active.ActiveSourceOperatorNodePushable;
import org.apache.asterix.active.EntityId;
@@ -35,6 +32,9 @@
import org.apache.hyracks.api.util.HyracksConstants;
import org.apache.hyracks.dataflow.common.io.MessagingFrameTupleAppender;
import org.apache.hyracks.dataflow.common.utils.TaskUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* The runtime for @see{FeedIntakeOperationDescriptor}.
@@ -42,7 +42,7 @@
* The artifacts are lazily activated when a feed receives a subscription request.
*/
public class FeedIntakeOperatorNodePushable extends ActiveSourceOperatorNodePushable {
- private static final Logger LOGGER = Logger.getLogger(FeedIntakeOperatorNodePushable.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
// TODO: Make configurable https://issues.apache.org/jira/browse/ASTERIXDB-2065
public static final int DEFAULT_ABORT_TIMEOUT = 10000;
private final FeedIntakeOperatorDescriptor opDesc;
@@ -81,7 +81,7 @@
message.getBuffer().flip();
run();
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failure during data ingestion", e);
+ LOGGER.log(Level.WARN, "Failure during data ingestion", e);
throw e;
} finally {
writer.close();
@@ -98,7 +98,7 @@
Thread.currentThread().interrupt();
throw HyracksDataException.create(e);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Unhandled Exception", e);
+ LOGGER.log(Level.WARN, "Unhandled Exception", e);
throw HyracksDataException.create(e);
}
}
@@ -113,7 +113,7 @@
} catch (InterruptedException e) {
throw e;
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Exception during feed ingestion ", e);
+ LOGGER.log(Level.WARN, "Exception during feed ingestion ", e);
throw HyracksDataException.create(e);
}
}
@@ -131,10 +131,10 @@
}
} catch (HyracksDataException hde) {
if (hde.getComponent() == ErrorCode.HYRACKS && hde.getErrorCode() == ErrorCode.TIMEOUT) {
- LOGGER.log(Level.WARNING, runtimeId + " stop adapter timed out. interrupting the thread...", hde);
+ LOGGER.log(Level.WARN, runtimeId + " stop adapter timed out. interrupting the thread...", hde);
taskThread.interrupt();
} else {
- LOGGER.log(Level.WARNING, "Failure during attempt to stop " + runtimeId, hde);
+ LOGGER.log(Level.WARN, "Failure during attempt to stop " + runtimeId, hde);
throw hde;
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
index 24a7462..477a777 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
@@ -20,8 +20,6 @@
import java.nio.ByteBuffer;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.active.ActiveManager;
import org.apache.asterix.active.ActiveRuntimeId;
@@ -42,13 +40,16 @@
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import org.apache.hyracks.dataflow.common.utils.TaskUtil;
import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/*
* This IFrameWriter doesn't follow the contract
*/
public class FeedMetaComputeNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
- private static final Logger LOGGER = Logger.getLogger(FeedMetaComputeNodePushable.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
/** Runtime node pushable corresponding to the core feed operator **/
private AbstractUnaryInputUnaryOutputOperatorNodePushable coreOperator;
@@ -142,7 +143,7 @@
FeedUtils.processFeedMessage(buffer, message, fta);
writer.nextFrame(buffer);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, e.getMessage(), e);
+ LOGGER.log(Level.WARN, e.getMessage(), e);
throw new HyracksDataException(e);
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
index 459a3ea..ecd28be 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
@@ -20,8 +20,6 @@
import java.nio.ByteBuffer;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.active.ActiveManager;
import org.apache.asterix.active.ActiveRuntimeId;
@@ -44,10 +42,13 @@
import org.apache.hyracks.dataflow.common.utils.TaskUtil;
import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
import org.apache.hyracks.util.trace.ITracer;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class FeedMetaStoreNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
- private static final Logger LOGGER = Logger.getLogger(FeedMetaStoreNodePushable.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
/** Runtime node pushable corresponding to the core feed operator **/
private AbstractUnaryInputUnaryOutputOperatorNodePushable insertOperator;
@@ -118,7 +119,7 @@
initializeNewFeedRuntime(runtimeId);
insertOperator.open();
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failed to open feed store operator", e);
+ LOGGER.log(Level.WARN, "Failed to open feed store operator", e);
throw new HyracksDataException(e);
}
}
@@ -148,7 +149,7 @@
FeedUtils.processFeedMessage(buffer, message, fta);
writer.nextFrame(buffer);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failure Processing a frame at store side", e);
+ LOGGER.log(Level.WARN, "Failure Processing a frame at store side", e);
throw HyracksDataException.create(e);
} finally {
tracer.durationE(tid, traceCategory, null);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
index ab1c424..74a20f5 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
@@ -38,12 +38,12 @@
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class FileSystemWatcher {
- private static final Logger LOGGER = Logger.getLogger(FileSystemWatcher.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private WatchService watcher;
private final HashMap<WatchKey, Path> keys;
private final LinkedList<File> files = new LinkedList<File>();
@@ -148,18 +148,14 @@
Path dir = keys.get(key);
if (dir == null) {
// This should never happen
- if (LOGGER.isEnabledFor(Level.WARN)) {
- LOGGER.warn("WatchKey not recognized!!");
- }
+ LOGGER.warn("WatchKey not recognized!!");
return;
}
for (WatchEvent<?> event : key.pollEvents()) {
Kind<?> kind = event.kind();
// An overflow event means that some events were dropped
if (kind == StandardWatchEventKinds.OVERFLOW) {
- if (LOGGER.isEnabledFor(Level.WARN)) {
- LOGGER.warn("Overflow event. Some events might have been missed");
- }
+ LOGGER.warn("Overflow event. Some events might have been missed");
// need to read and validate all files.
init();
return;
@@ -179,9 +175,7 @@
LocalFileSystemUtils.validateAndAdd(child, expression, files);
}
} catch (IOException e) {
- if (LOGGER.isEnabledFor(Level.ERROR)) {
- LOGGER.error(e);
- }
+ LOGGER.error(e);
}
}
}
@@ -241,17 +235,13 @@
try {
key = watcher.take();
} catch (InterruptedException x) {
- if (LOGGER.isEnabledFor(Level.WARN)) {
- LOGGER.warn("Feed Closed");
- }
+ LOGGER.warn("Feed Closed");
if (watcher == null) {
return null;
}
continue;
} catch (ClosedWatchServiceException e) {
- if (LOGGER.isEnabledFor(Level.WARN)) {
- LOGGER.warn("The watcher has exited");
- }
+ LOGGER.warn("The watcher has exited");
if (watcher == null) {
return null;
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/TwitterUtil.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/TwitterUtil.java
index bd8e52d..8cc3466 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/TwitterUtil.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/TwitterUtil.java
@@ -23,12 +23,12 @@
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import twitter4j.DirectMessage;
import twitter4j.FilterQuery;
@@ -48,7 +48,7 @@
public class TwitterUtil {
- private static Logger LOGGER = Logger.getLogger(TwitterUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static class ConfigurationConstants {
public static final String KEY_LOCATIONS = "locations"; // locations to track
@@ -191,17 +191,17 @@
try {
tf = new TwitterFactory(cb.build());
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
+ if (LOGGER.isWarnEnabled()) {
StringBuilder builder = new StringBuilder();
builder.append("Twitter Adapter requires the following config parameters\n");
builder.append(AuthenticationConstants.OAUTH_CONSUMER_KEY + "\n");
builder.append(AuthenticationConstants.OAUTH_CONSUMER_SECRET + "\n");
builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN + "\n");
builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET + "\n");
- LOGGER.warning(builder.toString());
- LOGGER.warning(
+ LOGGER.warn(builder.toString());
+ LOGGER.warn(
"Unable to configure Twitter adapter due to incomplete/incorrect authentication credentials");
- LOGGER.warning(
+ LOGGER.warn(
"For details on how to obtain OAuth authentication token, visit https://dev.twitter.com/oauth"
+ "/overview/application-owner-access-tokens");
}
@@ -277,10 +277,8 @@
break;
}
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("unable to load authentication credentials from auth.properties file"
- + "credential information will be obtained from adapter's configuration");
- }
+ LOGGER.warn("unable to load authentication credentials from auth.properties file"
+ + "credential information will be obtained from adapter's configuration");
}
}
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/kv/KVTestReader.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/kv/KVTestReader.java
index 57f37da..ec1db67 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/kv/KVTestReader.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/kv/KVTestReader.java
@@ -28,7 +28,8 @@
import org.apache.asterix.external.input.record.GenericRecord;
import org.apache.asterix.external.util.ExternalDataConstants;
import org.apache.asterix.external.util.FeedLogManager;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.couchbase.client.core.message.dcp.DCPRequest;
import com.couchbase.client.core.message.dcp.MutationMessage;
@@ -39,7 +40,7 @@
public class KVTestReader implements IRecordReader<DCPRequest> {
private final GenericRecord<DCPRequest> record;
- private static final Logger LOGGER = Logger.getLogger(KVTestReader.class);
+ private static final Logger LOGGER = LogManager.getLogger();
// Test variables
private final String bucket;
private final ArrayList<Short> assigned;
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
index fcd010d..d822f84 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
@@ -25,8 +25,6 @@
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.external.dataset.adapter.FeedAdapter;
import org.apache.asterix.om.types.ARecordType;
@@ -36,6 +34,8 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.std.file.ITupleParser;
import org.apache.hyracks.dataflow.std.file.ITupleParserFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class TestTypedAdapter extends FeedAdapter {
@@ -51,7 +51,7 @@
protected final IAType sourceDatatype;
- protected static final Logger LOGGER = Logger.getLogger(TestTypedAdapter.class.getName());
+ protected static final Logger LOGGER = LogManager.getLogger();
public TestTypedAdapter(ITupleParserFactory parserFactory, ARecordType sourceDatatype, IHyracksTaskContext ctx,
Map<String, String> configuration, int partition) throws IOException {
@@ -71,10 +71,10 @@
if (pis != null) {
tupleParser.parse(pis, writer);
} else {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning(
- "Could not obtain input stream for parsing from adapter " + this + "[" + partition + "]");
- }
+                if (LOGGER.isWarnEnabled()) {
+                    LOGGER.warn(
+                            "Could not obtain input stream for parsing from adapter " + this + "[" + partition + "]");
+                }
}
diff --git a/asterixdb/asterix-lang-sqlpp/pom.xml b/asterixdb/asterix-lang-sqlpp/pom.xml
index dd69697..6b19fc7 100644
--- a/asterixdb/asterix-lang-sqlpp/pom.xml
+++ b/asterixdb/asterix-lang-sqlpp/pom.xml
@@ -148,10 +148,6 @@
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </dependency>
- <dependency>
<groupId>org.apache.asterix</groupId>
<artifactId>asterix-metadata</artifactId>
<version>${project.version}</version>
@@ -170,5 +166,9 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
\ No newline at end of file
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/ExpressionToVariableUtil.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/ExpressionToVariableUtil.java
index cd805a1..96c61f2 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/ExpressionToVariableUtil.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/ExpressionToVariableUtil.java
@@ -26,11 +26,12 @@
import org.apache.asterix.lang.common.expression.VariableExpr;
import org.apache.asterix.lang.common.struct.VarIdentifier;
import org.apache.asterix.lang.sqlpp.parser.ParseException;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ExpressionToVariableUtil {
- private static final Logger LOGGER = Logger.getLogger(ExpressionToVariableUtil.class);
+ private static final Logger LOGGER = LogManager.getLogger();
private ExpressionToVariableUtil() {
}
diff --git a/asterixdb/asterix-metadata/pom.xml b/asterixdb/asterix-metadata/pom.xml
index 606b474..9c1bc11 100644
--- a/asterixdb/asterix-metadata/pom.xml
+++ b/asterixdb/asterix-metadata/pom.xml
@@ -179,5 +179,9 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/AsterixStateProxy.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/AsterixStateProxy.java
index da6bb54..8ab9f82 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/AsterixStateProxy.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/AsterixStateProxy.java
@@ -22,17 +22,18 @@
import java.rmi.RemoteException;
import java.rmi.server.UnicastRemoteObject;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Logger;
import org.apache.asterix.metadata.api.IAsterixStateProxy;
import org.apache.asterix.metadata.api.IMetadataNode;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* Contains Asterix distributed state such as the AsterixProperties.
*/
public class AsterixStateProxy implements IAsterixStateProxy {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(AsterixStateProxy.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private IMetadataNode metadataNode;
private static final IAsterixStateProxy cc = new AsterixStateProxy();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
index bf8079e..3205cb6 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -23,8 +23,6 @@
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.cluster.ClusterPartition;
@@ -90,6 +88,9 @@
import org.apache.hyracks.storage.am.lsm.common.impls.PrefixMergePolicyFactory;
import org.apache.hyracks.storage.common.ILocalResourceRepository;
import org.apache.hyracks.storage.common.LocalResource;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* Initializes the remote metadata storage facilities ("universe") using a
@@ -102,7 +103,7 @@
*/
public class MetadataBootstrap {
public static final boolean IS_DEBUG_MODE = false;
- private static final Logger LOGGER = Logger.getLogger(MetadataBootstrap.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static INcApplicationContext appContext;
private static ILocalResourceRepository localResourceRepository;
private static IIOManager ioManager;
@@ -151,7 +152,7 @@
for (int i = 0; i < PRIMARY_INDEXES.length; i++) {
enlistMetadataDataset(ncServiceContext, PRIMARY_INDEXES[i]);
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(
"Finished enlistment of metadata B-trees in " + (isNewUniverse ? "new" : "old") + " universe");
}
@@ -164,7 +165,7 @@
insertInitialAdapters(mdTxnCtx);
BuiltinFeedPolicies.insertInitialFeedPolicies(mdTxnCtx);
insertInitialCompactionPolicies(mdTxnCtx);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Finished creating metadata B-trees.");
}
}
@@ -174,7 +175,7 @@
} catch (Exception e) {
try {
if (IS_DEBUG_MODE) {
- LOGGER.log(Level.SEVERE, "Failure during metadata bootstrap", e);
+ LOGGER.log(Level.ERROR, "Failure during metadata bootstrap", e);
}
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
} catch (Exception e2) {
@@ -214,7 +215,7 @@
GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES, id, new HashMap<String, String>(),
DatasetType.INTERNAL, indexes[i].getDatasetId().getId(), MetadataUtil.PENDING_NO_OP));
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Finished inserting initial datasets.");
}
}
@@ -234,7 +235,7 @@
types.get(i).getTypeName(), types.get(i), false));
}
MetadataManager.INSTANCE.addDatatype(mdTxnCtx, MetadataBuiltinEntities.ANY_OBJECT_DATATYPE);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Finished inserting initial datatypes.");
}
}
@@ -259,7 +260,7 @@
adapter = getAdapter(adapterClassName);
MetadataManager.INSTANCE.addAdapter(mdTxnCtx, adapter);
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Finished inserting built-in adapters.");
}
}
@@ -379,7 +380,7 @@
// to INDEX_DATASET.
MetadataTransactionContext mdTxnCtx = null;
MetadataManager.INSTANCE.acquireWriteLatch();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Starting DDL recovery ...");
}
@@ -392,13 +393,13 @@
// the commit wasn't there before. yet, everything was working
// correctly!!!!!!!!!!!
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Completed DDL recovery.");
}
} catch (Exception e) {
try {
if (IS_DEBUG_MODE) {
- LOGGER.log(Level.SEVERE, "Failure during DDL recovery", e);
+ LOGGER.log(Level.ERROR, "Failure during DDL recovery", e);
}
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
} catch (Exception e2) {
@@ -415,7 +416,7 @@
if (dataverse.getPendingOp() != MetadataUtil.PENDING_NO_OP) {
// drop pending dataverse
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverse.getDataverseName());
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Dropped a pending dataverse: " + dataverse.getDataverseName());
}
} else {
@@ -432,7 +433,7 @@
if (dataset.getPendingOp() != MetadataUtil.PENDING_NO_OP) {
// drop pending dataset
MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataset.getDataverseName(), dataset.getDatasetName());
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(
"Dropped a pending dataset: " + dataset.getDataverseName() + "." + dataset.getDatasetName());
}
@@ -444,7 +445,7 @@
// drop pending index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataset.getDataverseName(), dataset.getDatasetName(),
index.getIndexName());
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Dropped a pending index: " + dataset.getDataverseName() + "."
+ dataset.getDatasetName() + "." + index.getIndexName());
}
@@ -459,7 +460,7 @@
List<ExternalFile> files = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, dataset);
for (ExternalFile file : files) {
MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Dropped an external file: " + dataset.getDataverseName() + "."
+ dataset.getDatasetName() + "." + file.getFileNumber());
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
index 2386d77..60dd158 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
@@ -23,8 +23,6 @@
import java.util.List;
import java.util.Map;
import java.util.Objects;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.IntStream;
import org.apache.asterix.active.IActiveEntityEventsListener;
@@ -110,6 +108,9 @@
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
import org.apache.hyracks.storage.common.IResourceFactory;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -123,7 +124,7 @@
* Constants
*/
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(Dataset.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
//TODO: Remove Singletons
private static final BTreeResourceFactoryProvider bTreeResourceFactoryProvider =
BTreeResourceFactoryProvider.INSTANCE;
@@ -611,7 +612,7 @@
try {
return mapper.writeValueAsString(toMap());
} catch (JsonProcessingException e) {
- LOGGER.log(Level.WARNING, "Unable to convert map to json String", e);
+ LOGGER.log(Level.WARN, "Unable to convert map to json String", e);
return dataverseName + "." + datasetName;
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java
index ea514e4..2f9f9cb 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java
@@ -23,8 +23,6 @@
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.builders.IARecordBuilder;
import org.apache.asterix.builders.OrderedListBuilder;
@@ -44,6 +42,9 @@
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -51,7 +52,7 @@
public class ExternalDatasetDetails implements IDatasetDetails {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(ExternalDatasetDetails.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final String adapter;
private final Map<String, String> properties;
private Date lastRefreshTime;
@@ -152,7 +153,7 @@
try {
return mapper.writeValueAsString(toMap());
} catch (JsonProcessingException e) {
- LOGGER.log(Level.WARNING, "Unable to convert map to json String", e);
+ LOGGER.log(Level.WARN, "Unable to convert map to json String", e);
return getClass().getSimpleName();
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/BuiltinFeedPolicies.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/BuiltinFeedPolicies.java
index 5c67831..f9ce755 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/BuiltinFeedPolicies.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/BuiltinFeedPolicies.java
@@ -20,20 +20,19 @@
import java.util.HashMap;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-import org.apache.asterix.common.exceptions.MetadataException;
import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
import org.apache.asterix.metadata.entities.FeedPolicyEntity;
import org.apache.asterix.metadata.utils.MetadataConstants;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class BuiltinFeedPolicies {
- private static final Logger LOGGER = Logger.getLogger(BuiltinFeedPolicies.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final FeedPolicyEntity BASIC = initializeBasicPolicy();
@@ -111,9 +110,7 @@
for (FeedPolicyEntity feedPolicy : BuiltinFeedPolicies.POLICIES) {
MetadataManager.INSTANCE.addFeedPolicy(mdTxnCtx, feedPolicy);
}
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Finished adding built-in feed policies.");
- }
+ LOGGER.info("Finished adding built-in feed policies.");
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
index 3d05c0e..cf663eb 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
@@ -25,7 +25,6 @@
import java.util.Map;
import java.util.Set;
import java.util.UUID;
-import java.util.logging.Logger;
import org.apache.asterix.builders.IARecordBuilder;
import org.apache.asterix.builders.RecordBuilder;
@@ -90,9 +89,11 @@
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
import org.apache.hyracks.storage.common.IResourceFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class DatasetUtil {
- private static final Logger LOGGER = Logger.getLogger(DatasetUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
/*
* Dataset related operations
*/
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
index d625dd0..ef3346b 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
@@ -25,8 +25,6 @@
import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
@@ -75,9 +73,11 @@
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
import org.apache.hyracks.storage.common.IResourceFactory;
import org.apache.hyracks.storage.common.IStorageManager;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ExternalIndexingOperations {
- private static final Logger LOGGER = Logger.getLogger(ExternalIndexingOperations.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final List<List<String>> FILE_INDEX_FIELD_NAMES =
Collections.unmodifiableList(Collections.singletonList(Collections.singletonList("")));
public static final List<IAType> FILE_INDEX_FIELD_TYPES =
@@ -141,7 +141,7 @@
}
return files;
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Exception while trying to get snapshot from external system", e);
+ LOGGER.warn("Exception while trying to get snapshot from external system", e);
throw new AlgebricksException("Unable to get list of HDFS files " + e);
}
}
diff --git a/asterixdb/asterix-replication/pom.xml b/asterixdb/asterix-replication/pom.xml
index 41afc1d..7072e48 100644
--- a/asterixdb/asterix-replication/pom.xml
+++ b/asterixdb/asterix-replication/pom.xml
@@ -87,6 +87,10 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/logging/TxnLogReplicator.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/logging/TxnLogReplicator.java
index 118fde6..2e4cb50 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/logging/TxnLogReplicator.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/logging/TxnLogReplicator.java
@@ -21,14 +21,15 @@
import java.util.concurrent.Callable;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* This class is responsible for sending transactions logs to remote replicas.
*/
public class TxnLogReplicator implements Callable<Boolean> {
- private static final Logger LOGGER = Logger.getLogger(TxnLogReplicator.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final ReplicationLogBuffer POISON_PILL = new ReplicationLogBuffer(null, 0, 0);
private final LinkedBlockingQueue<ReplicationLogBuffer> emptyQ;
private final LinkedBlockingQueue<ReplicationLogBuffer> flushQ;
@@ -97,10 +98,7 @@
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, "TxnLogReplicator is terminating abnormally. Logs Replication Stopped.",
- e);
- }
+ LOGGER.error("TxnLogReplicator is terminating abnormally. Logs Replication Stopped.", e);
throw e;
}
}
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
index 32e9498..c0863e2 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
@@ -38,8 +38,6 @@
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.function.Predicate;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
@@ -93,13 +91,16 @@
import org.apache.hyracks.storage.common.LocalResource;
import org.apache.hyracks.util.StorageUtil;
import org.apache.hyracks.util.StorageUtil.StorageUnit;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* This class is used to receive and process replication requests from remote replicas or replica events from CC
*/
public class ReplicationChannel extends Thread implements IReplicationChannel {
- private static final Logger LOGGER = Logger.getLogger(ReplicationChannel.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int LOG_REPLICATION_END_HANKSHAKE_LOG_SIZE = 1;
private final ExecutorService replicationThreads;
private final String localNodeID;
@@ -190,7 +191,7 @@
replicationThreads.execute(new ReplicationThread(socketChannel));
}
} catch (AsynchronousCloseException e) {
- LOGGER.log(Level.WARNING, "Replication channel closed", e);
+ LOGGER.warn("Replication channel closed", e);
} catch (IOException e) {
throw new IllegalStateException(
"Could not open replication channel @ IP Address: " + nodeIP + ":" + dataPort, e);
@@ -296,17 +297,13 @@
replicationFunction = ReplicationProtocol.getRequestType(socketChannel, inBuffer);
}
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Unexpected error during replication.", e);
- }
+                LOGGER.warn("Unexpected error during replication.", e);
} finally {
if (socketChannel.isOpen()) {
try {
socketChannel.close();
} catch (IOException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Filed to close replication socket.", e);
- }
+                        LOGGER.warn("Failed to close replication socket.", e);
}
}
}
@@ -536,7 +533,7 @@
}
break;
default:
- LOGGER.severe("Unsupported LogType: " + remoteLog.getLogType());
+ LOGGER.error("Unsupported LogType: " + remoteLog.getLogType());
}
}
}
@@ -593,7 +590,7 @@
}
}
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failed to checkpoint replica indexes", e);
+ LOGGER.error("Failed to checkpoint replica indexes", e);
}
}
@@ -639,9 +636,7 @@
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} catch (IOException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Failed to send job replication ACK", e);
- }
+ LOGGER.warn("Failed to send job replication ACK", e);
}
}
}
@@ -666,9 +661,7 @@
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, "Unexpected exception during LSN synchronization", e);
- }
+ LOGGER.error("Unexpected exception during LSN synchronization", e);
}
}
}
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
index c0c491a..6445345 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
@@ -49,8 +49,6 @@
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.apache.asterix.common.cluster.ClusterPartition;
@@ -95,13 +93,16 @@
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexReplicationJob;
import org.apache.hyracks.util.StorageUtil;
import org.apache.hyracks.util.StorageUtil.StorageUnit;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* This class is used to process replication jobs and maintain remote replicas states
*/
public class ReplicationManager implements IReplicationManager {
- private static final Logger LOGGER = Logger.getLogger(ReplicationManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int INITIAL_REPLICATION_FACTOR = 1;
private static final int MAX_JOB_COMMIT_ACK_WAIT = 10000;
private final String nodeId;
@@ -158,7 +159,7 @@
replicationStrategy = ReplicationStrategyFactory.create(replicationProperties.getReplicationStrategy(),
replicationProperties, ncConfig.getConfigManager());
} catch (HyracksDataException e) {
- LOGGER.log(Level.WARNING, "Couldn't initialize replication strategy", e);
+ LOGGER.log(Level.WARN, "Couldn't initialize replication strategy", e);
}
this.replicaResourcesManager = (ReplicaResourcesManager) remoteResoucesManager;
this.asterixAppRuntimeContextProvider = asterixAppRuntimeContextProvider;
@@ -333,7 +334,7 @@
remainingFiles--;
Path path = Paths.get(filePath);
if (Files.notExists(path)) {
- LOGGER.log(Level.SEVERE, "File deleted before replication: " + filePath);
+ LOGGER.log(Level.ERROR, "File deleted before replication: " + filePath);
continue;
}
@@ -528,16 +529,14 @@
}
private void handleReplicationFailure(SocketChannel socketChannel, Throwable t) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Could not complete replication request.", t);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "Could not complete replication request.", t);
}
if (socketChannel.isOpen()) {
try {
socketChannel.close();
} catch (IOException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Could not close socket.", e);
- }
+ LOGGER.log(Level.WARN, "Could not close socket.", e);
}
}
reportFailedReplica(getReplicaIdBySocket(socketChannel));
@@ -547,21 +546,14 @@
* Stops TxnLogReplicator and closes the sockets used to replicate logs.
*/
private void endTxnLogReplicationHandshake() {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Terminating TxnLogReplicator thread ...");
- }
+ LOGGER.info("Terminating TxnLogReplicator thread ...");
txnlogReplicator.terminate();
try {
txnLogReplicatorTask.get();
} catch (ExecutionException | InterruptedException e) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, "TxnLogReplicator thread terminated abnormally", e);
- }
+ LOGGER.error("TxnLogReplicator thread terminated abnormally", e);
}
-
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("TxnLogReplicator thread was terminated.");
- }
+ LOGGER.info("TxnLogReplicator thread was terminated.");
/*
* End log replication handshake (by sending a dummy log with a single byte)
@@ -587,16 +579,14 @@
txnCommitAcks.wait(1000);
long waitDuration = System.currentTimeMillis() - waitStartTime;
if (waitDuration > MAX_JOB_COMMIT_ACK_WAIT) {
- LOGGER.log(Level.SEVERE,
+ LOGGER.log(Level.ERROR,
"Timeout before receving all job ACKs from replicas. Pending txns ("
+ txnCommitAcks.keySet().toString() + ")");
break;
}
}
} catch (InterruptedException e) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, "Interrupted while waiting for jobs ACK", e);
- }
+ LOGGER.error("Interrupted while waiting for jobs ACK", e);
Thread.currentThread().interrupt();
}
}
@@ -764,8 +754,10 @@
replicationFactor--;
}
- LOGGER.log(Level.WARNING, "Replica " + replicaId + " state changed to: " + newState.name()
- + ". Replication factor changed to: " + replicationFactor);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Replica " + replicaId + " state changed to: " + newState.name()
+ + ". Replication factor changed to: " + replicationFactor);
+ }
if (suspendReplication) {
startReplicationThreads();
@@ -786,8 +778,8 @@
Set<String> replicaIds = txnCommitAcks.get(txnId);
replicaIds.add(replicaId);
} else {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Invalid job replication ACK received for txnId(" + txnId + ")");
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Invalid job replication ACK received for txnId(" + txnId + ")");
}
return;
}
@@ -838,8 +830,8 @@
SocketChannel sc = getReplicaSocket(replica.getId());
replicaNodesSockets.put(replica.getId(), sc);
} catch (IOException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Could not get replica socket", e);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "Could not get replica socket", e);
}
reportFailedReplica(replica.getId());
}
@@ -1301,9 +1293,7 @@
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} catch (IOException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Couldn't complete processing replication job", e);
- }
+ LOGGER.warn("Couldn't complete processing replication job", e);
}
}
@@ -1353,7 +1343,7 @@
addAckToJob(jobId, ackFrom);
}
} catch (AsynchronousCloseException e) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, "Replication listener stopped for remote replica: " + replicaId, e);
}
} catch (IOException e) {
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java
index 54d8562..5d044b4 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java
@@ -25,8 +25,6 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.Collectors;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
@@ -40,15 +38,16 @@
import org.apache.asterix.common.replication.Replica;
import org.apache.asterix.common.transactions.ILogManager;
import org.apache.asterix.common.transactions.IRecoveryManager;
-import org.apache.asterix.common.utils.StorageConstants;
import org.apache.asterix.replication.storage.ReplicaResourcesManager;
import org.apache.asterix.transaction.management.resource.PersistentLocalResourceRepository;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class RemoteRecoveryManager implements IRemoteRecoveryManager {
private final IReplicationManager replicationManager;
- private static final Logger LOGGER = Logger.getLogger(RemoteRecoveryManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final INcApplicationContext runtimeContext;
private final ReplicationProperties replicationProperties;
private Map<String, Set<String>> failbackRecoveryReplicas;
@@ -216,9 +215,7 @@
}
break;
} catch (IOException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Failed during remote recovery. Attempting again...", e);
- }
+ LOGGER.warn("Failed during remote recovery. Attempting again...", e);
maxRecoveryAttempts--;
}
}
@@ -259,9 +256,7 @@
* in case of failure during failback completion process we need to construct a new plan
* and get all the files from the start since the remote replicas will change in the new plan.
*/
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Failed during completing failback. Restarting failback process...", e);
- }
+ LOGGER.warn("Failed during completing failback. Restarting failback process...", e);
startFailbackProcess();
}
@@ -317,9 +312,7 @@
logManager.renewLogFilesAndStartFromLSN(maxRemoteLSN);
break;
} catch (IOException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Failed during remote recovery. Attempting again...", e);
- }
+ LOGGER.warn("Failed during remote recovery. Attempting again...", e);
maxRecoveryAttempts--;
}
}
diff --git a/asterixdb/asterix-runtime/pom.xml b/asterixdb/asterix-runtime/pom.xml
index f9bdda8..447eb53e 100644
--- a/asterixdb/asterix-runtime/pom.xml
+++ b/asterixdb/asterix-runtime/pom.xml
@@ -198,5 +198,9 @@
<version>1.10.19</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java
index 8fad5e8..7fc264b 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java
@@ -194,7 +194,7 @@
try {
if (aggType == ATypeTag.SYSTEM_NULL) {
if (GlobalConfig.DEBUG) {
- GlobalConfig.ASTERIX_LOGGER.finest("AVG aggregate ran over empty input.");
+ GlobalConfig.ASTERIX_LOGGER.trace("AVG aggregate ran over empty input.");
}
result.writeByte(ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
} else if (aggType == ATypeTag.NULL) {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java
index 60e7127..48eb1de 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java
@@ -182,7 +182,7 @@
// Double check that count 0 is accounted
if (aggType == ATypeTag.SYSTEM_NULL) {
if (GlobalConfig.DEBUG) {
- GlobalConfig.ASTERIX_LOGGER.finest("AVG aggregate ran over empty input.");
+ GlobalConfig.ASTERIX_LOGGER.trace("AVG aggregate ran over empty input.");
}
resultStorage.getDataOutput().writeByte(ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
result.set(resultStorage);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CastTypeLaxEvaluator.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CastTypeLaxEvaluator.java
index cbe04e2..35335c6 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CastTypeLaxEvaluator.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CastTypeLaxEvaluator.java
@@ -25,13 +25,13 @@
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IPointable;
-
-import java.util.logging.Level;
-import java.util.logging.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
class CastTypeLaxEvaluator extends CastTypeEvaluator {
- private static final Logger LOGGER = Logger.getLogger(CastTypeLaxEvaluator.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final byte[] MISSING_BYTES = new byte[] { ATypeTag.SERIALIZED_MISSING_TYPE_TAG };
@@ -49,8 +49,8 @@
try {
super.cast(result);
} catch (HyracksDataException e) {
- if (LOGGER.isLoggable(Level.FINEST)) {
- LOGGER.log(Level.FINEST, e.toString(), e);
+ if (LOGGER.isTraceEnabled()) {
+ LOGGER.log(Level.TRACE, e.toString(), e);
}
result.set(MISSING_BYTES, 0, MISSING_BYTES.length);
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/InjectFailureDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/InjectFailureDescriptor.java
index af5f690..25b6c5d9 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/InjectFailureDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/InjectFailureDescriptor.java
@@ -18,9 +18,6 @@
*/
package org.apache.asterix.runtime.evaluators.functions;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.dataflow.data.nontagged.serde.ABooleanSerializerDeserializer;
@@ -38,12 +35,15 @@
import org.apache.hyracks.data.std.api.IPointable;
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class InjectFailureDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(SleepDescriptor.class.getSimpleName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -81,7 +81,7 @@
boolean argResult = ABooleanSerializerDeserializer.getBoolean(argPtr.getByteArray(),
argPtr.getStartOffset() + 1);
if (argResult) {
- LOGGER.log(Level.SEVERE, ctx.getTaskAttemptId() + " injecting failure");
+ LOGGER.log(Level.ERROR, ctx.getTaskAttemptId() + " injecting failure");
throw new RuntimeDataException(ErrorCode.INJECTED_FAILURE, getIdentifier());
}
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SleepDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SleepDescriptor.java
index a186b32..8bced98 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SleepDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SleepDescriptor.java
@@ -18,9 +18,6 @@
*/
package org.apache.asterix.runtime.evaluators.functions;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
@@ -33,12 +30,15 @@
import org.apache.hyracks.data.std.api.IPointable;
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class SleepDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(SleepDescriptor.class.getSimpleName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final IFunctionDescriptorFactory FACTORY = SleepDescriptor::new;
@@ -65,14 +65,14 @@
final long time = ATypeHierarchy.getLongValue(getIdentifier().getName(), 1, bytes, offset);
try {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, ctx.getTaskAttemptId() + " sleeping for " + time + " ms");
}
Thread.sleep(time);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} finally {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, ctx.getTaskAttemptId() + " done sleeping for " + time + " ms");
}
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportLocalCountersMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportLocalCountersMessage.java
index 3f8ced8..0b321a2 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportLocalCountersMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportLocalCountersMessage.java
@@ -18,9 +18,6 @@
*/
package org.apache.asterix.runtime.message;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
@@ -30,10 +27,13 @@
import org.apache.asterix.transaction.management.service.transaction.TxnIdFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ReportLocalCountersMessage implements ICcAddressedMessage {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(ReportLocalCountersMessage.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final long maxResourceId;
private final long maxTxnId;
private final String src;
@@ -62,7 +62,7 @@
try {
((INCMessageBroker) ncs.getContext().getMessageBroker()).sendMessageToCC(countersMessage);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Unable to report local counters", e);
+ LOGGER.log(Level.ERROR, "Unable to report local counters", e);
throw HyracksDataException.create(e);
}
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
index ead5f53..c07200a 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
@@ -29,8 +29,6 @@
import java.util.SortedMap;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
import org.apache.asterix.common.cluster.ClusterPartition;
@@ -49,6 +47,9 @@
import org.apache.hyracks.control.common.application.ConfigManagerApplicationConfig;
import org.apache.hyracks.control.common.config.ConfigManager;
import org.apache.hyracks.control.common.controllers.NCConfig;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
@@ -65,7 +66,7 @@
* shutdown and using it on startup to identify the nodes that are expected the join.
*/
- private static final Logger LOGGER = Logger.getLogger(ClusterStateManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final Map<String, Map<IOption, Object>> ncConfigMap = new HashMap<>();
private Set<String> pendingRemoval = new HashSet<>();
private ClusterState state = ClusterState.UNUSABLE;
@@ -91,7 +92,7 @@
@Override
public synchronized void notifyNodeFailure(String nodeId) throws HyracksException {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Removing configuration parameters for node id " + nodeId);
}
failedNodes.add(nodeId);
@@ -102,7 +103,7 @@
@Override
public synchronized void notifyNodeJoin(String nodeId, Map<IOption, Object> configuration) throws HyracksException {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Registering configuration parameters for node id " + nodeId);
}
failedNodes.remove(nodeId);
@@ -239,8 +240,8 @@
public synchronized String[] getIODevices(String nodeId) {
Map<IOption, Object> ncConfig = ncConfigMap.get(nodeId);
if (ncConfig == null) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Configuration parameters for nodeId " + nodeId
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Configuration parameters for nodeId " + nodeId
+ " not found. The node has not joined yet or has left.");
}
return new String[0];
@@ -380,7 +381,7 @@
@Override
public synchronized void registerNodePartitions(String nodeId, ClusterPartition[] nodePartitions)
throws AlgebricksException {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Registering node partitions for node " + nodeId + ": " + Arrays.toString(nodePartitions));
}
// We want to make sure there are no conflicts; make two passes for simplicity...
@@ -403,7 +404,7 @@
if (nodePartitions == null) {
LOGGER.info("deregisterNodePartitions unknown node " + nodeId + " (already removed?)");
} else {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("deregisterNodePartitions for node " + nodeId + ": " + Arrays.toString(nodePartitions));
}
for (ClusterPartition nodePartition : nodePartitions) {
@@ -415,23 +416,23 @@
@Override
public synchronized void removePending(String nodeId) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Registering intention to remove node id " + nodeId);
}
if (participantNodes.contains(nodeId)) {
pendingRemoval.add(nodeId);
} else {
- LOGGER.warning("Cannot register unknown node " + nodeId + " for pending removal");
+ LOGGER.warn("Cannot register unknown node " + nodeId + " for pending removal");
}
}
@Override
public synchronized boolean cancelRemovePending(String nodeId) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Deregistering intention to remove node id " + nodeId);
}
if (!pendingRemoval.remove(nodeId)) {
- LOGGER.warning("Cannot deregister intention to remove node id " + nodeId + " that was not registered");
+ LOGGER.warn("Cannot deregister intention to remove node id " + nodeId + " that was not registered");
return false;
} else {
return true;
diff --git a/asterixdb/asterix-server/pom.xml b/asterixdb/asterix-server/pom.xml
index 7780f8b..72c84e4 100644
--- a/asterixdb/asterix-server/pom.xml
+++ b/asterixdb/asterix-server/pom.xml
@@ -610,5 +610,9 @@
<groupId>org.apache.hyracks</groupId>
<artifactId>hyracks-util</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/AbstractExecutionIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/AbstractExecutionIT.java
index 6c14aa0..8890898 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/AbstractExecutionIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/AbstractExecutionIT.java
@@ -23,7 +23,8 @@
import org.apache.asterix.testframework.xml.TestCase.CompilationUnit;
import org.apache.asterix.testframework.xml.TestGroup;
import org.apache.commons.lang3.StringUtils;
-import org.apache.hyracks.util.file.FileUtil;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.codehaus.plexus.util.FileUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -38,8 +39,6 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import static org.apache.hyracks.util.file.FileUtil.joinPath;
@@ -49,7 +48,7 @@
@RunWith(Parameterized.class)
public abstract class AbstractExecutionIT {
- protected static final Logger LOGGER = Logger.getLogger(AbstractExecutionIT.class.getName());
+ protected static final Logger LOGGER = LogManager.getLogger();
protected static final String PATH_ACTUAL = joinPath("target", "ittest");
protected static final String PATH_BASE = joinPath("..", "asterix-app", "src", "test", "resources", "runtimets");
@@ -70,7 +69,7 @@
@BeforeClass
public static void setUp() throws Exception {
System.out.println("Starting setup");
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Starting setup");
}
File outdir = new File(PATH_ACTUAL);
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/DmlRecoveryIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/DmlRecoveryIT.java
index bc209c3..bac49b2 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/DmlRecoveryIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/DmlRecoveryIT.java
@@ -23,12 +23,13 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
-import java.util.logging.Logger;
import org.apache.asterix.test.base.RetainLogsRule;
import org.apache.asterix.test.common.TestExecutor;
import org.apache.asterix.testframework.context.TestCaseContext;
import org.apache.commons.io.FileUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
@@ -43,7 +44,7 @@
// variable to indicate whether this test will be executed
- private static final Logger LOGGER = Logger.getLogger(RecoveryIT.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final String PATH_ACTUAL = "target" + File.separator + "rttest" + File.separator;
private static final String TESTSUITE_PATH_BASE = "../asterix-app/src/test/resources/runtimets/";
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NCServiceExecutionIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NCServiceExecutionIT.java
index 93b26dc..1d3c1c5 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NCServiceExecutionIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NCServiceExecutionIT.java
@@ -24,8 +24,6 @@
import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.test.common.TestExecutor;
import org.apache.asterix.test.runtime.HDFSCluster;
@@ -36,6 +34,9 @@
import org.apache.hyracks.server.process.HyracksCCProcess;
import org.apache.hyracks.server.process.HyracksNCServiceProcess;
import org.apache.hyracks.server.process.HyracksVirtualCluster;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
@@ -91,7 +92,7 @@
public static final String ACTUAL_RESULTS_DIR = StringUtils.join(new String[] { TARGET_DIR, "ittest" },
File.separator);
- private static final Logger LOGGER = Logger.getLogger(NCServiceExecutionIT.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
enum KillCommand {
CC,
@@ -256,7 +257,7 @@
testExecutor.waitForClusterActive(30, TimeUnit.SECONDS);
} catch (Exception e) {
// stop executing the rest of the tests since the cluster is not ACTIVE
- LOGGER.log(Level.SEVERE, "Cannot continue since cluster is not ACTIVE", e);
+ LOGGER.log(Level.ERROR, "Cannot continue since cluster is not ACTIVE", e);
clusterActive = false;
Assert.fail("Cluster is not ACTIVE");
}
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/RecoveryIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/RecoveryIT.java
index 21e3068..7aea282 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/RecoveryIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/RecoveryIT.java
@@ -23,7 +23,6 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
-import java.util.logging.Logger;
import org.apache.asterix.test.base.RetainLogsRule;
import org.apache.asterix.test.common.TestExecutor;
@@ -31,6 +30,8 @@
import org.apache.asterix.testframework.context.TestCaseContext;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
@@ -43,7 +44,7 @@
@RunWith(Parameterized.class)
public class RecoveryIT {
- private static final Logger LOGGER = Logger.getLogger(RecoveryIT.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final String PATH_ACTUAL = "target" + File.separator + "rttest" + File.separator;
private static final String PATH_BASE = "src/test/resources/transactionts/";
private static final String HDFS_BASE = "../asterix-app/";
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/ReplicationIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/ReplicationIT.java
index 6ebb632..aa2d97d 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/ReplicationIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/ReplicationIT.java
@@ -22,7 +22,6 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Logger;
import org.apache.asterix.test.base.RetainLogsRule;
import org.apache.asterix.test.common.TestExecutor;
@@ -33,7 +32,12 @@
import org.apache.hyracks.server.process.HyracksNCServiceProcess;
import org.apache.hyracks.server.process.HyracksVirtualCluster;
import org.apache.hyracks.util.file.FileUtil;
-import org.junit.*;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
import org.junit.rules.TestRule;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -52,7 +56,7 @@
public static final String CONF_DIR = StringUtils.join(new String[] { TARGET_DIR, "test-classes", "ReplicationIT" },
File.separator);
private static final String PATH_ACTUAL = FileUtil.joinPath("target", "ittest");
- private static final Logger LOGGER = Logger.getLogger(ReplicationIT.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static String reportPath = new File(FileUtil.joinPath("target", "failsafe-reports")).getAbsolutePath();
private final TestExecutor testExecutor = new TestExecutor();
diff --git a/asterixdb/asterix-tools/pom.xml b/asterixdb/asterix-tools/pom.xml
index efe1418..40453e1 100644
--- a/asterixdb/asterix-tools/pom.xml
+++ b/asterixdb/asterix-tools/pom.xml
@@ -153,5 +153,9 @@
<version>${project.version}</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/external/data/TweetGeneratorForSpatialIndexEvaluation.java b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/external/data/TweetGeneratorForSpatialIndexEvaluation.java
index fcaa2a4..0b7121f 100644
--- a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/external/data/TweetGeneratorForSpatialIndexEvaluation.java
+++ b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/external/data/TweetGeneratorForSpatialIndexEvaluation.java
@@ -23,16 +23,16 @@
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.tools.external.data.DataGeneratorForSpatialIndexEvaluation.InitializationInfo;
import org.apache.asterix.tools.external.data.DataGeneratorForSpatialIndexEvaluation.TweetMessage;
import org.apache.asterix.tools.external.data.DataGeneratorForSpatialIndexEvaluation.TweetMessageIterator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class TweetGeneratorForSpatialIndexEvaluation {
- private static Logger LOGGER = Logger.getLogger(TweetGeneratorForSpatialIndexEvaluation.class.getName());
+ private static Logger LOGGER = LogManager.getLogger();
public static final String KEY_DURATION = "duration";
public static final String KEY_TPS = "tps";
@@ -116,7 +116,7 @@
numFlushedTweets += frameTweetCount;
frameTweetCount = 0;
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Reached end of batch. Tweet Count: [" + partition + "]" + tweetCount);
}
return false;
diff --git a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/test/AdmDataGenTest.java b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/test/AdmDataGenTest.java
index 1eef3c3..b6d44e0 100644
--- a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/test/AdmDataGenTest.java
+++ b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/test/AdmDataGenTest.java
@@ -24,8 +24,9 @@
import java.io.FileReader;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.logging.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.BeforeClass;
@@ -40,7 +41,7 @@
@RunWith(Parameterized.class)
public class AdmDataGenTest {
- private static final Logger LOGGER = Logger.getLogger(AdmDataGenTest.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final String SEPARATOR = File.separator;
private static final String EXTENSION_QUERY = "adg";
diff --git a/asterixdb/asterix-transactions/pom.xml b/asterixdb/asterix-transactions/pom.xml
index b7e1871..7ec8307 100644
--- a/asterixdb/asterix-transactions/pom.xml
+++ b/asterixdb/asterix-transactions/pom.xml
@@ -151,5 +151,9 @@
<groupId>org.apache.hyracks</groupId>
<artifactId>algebricks-runtime</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
\ No newline at end of file
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ConcurrentLockManager.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ConcurrentLockManager.java
index 3aa2578..726f95c 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ConcurrentLockManager.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ConcurrentLockManager.java
@@ -24,8 +24,6 @@
import java.util.ArrayList;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.transactions.DatasetId;
@@ -34,6 +32,9 @@
import org.apache.asterix.common.transactions.ITransactionManager;
import org.apache.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants.LockMode;
import org.apache.hyracks.api.lifecycle.ILifeCycleComponent;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* A concurrent implementation of the ILockManager interface.
@@ -43,8 +44,8 @@
*/
public class ConcurrentLockManager implements ILockManager, ILifeCycleComponent {
- static final Logger LOGGER = Logger.getLogger(ConcurrentLockManager.class.getName());
- static final Level LVL = Level.FINER;
+ static final Logger LOGGER = LogManager.getLogger();
+ static final Level LVL = Level.TRACE;
public static final boolean ENABLED_DEADLOCK_FREE_LOCKING_PROTOCOL = true;
public static final int NIL = -1;
@@ -375,7 +376,7 @@
if (reqSlot != NILL) {
// deallocate request, if we allocated one earlier
if (DEBUG_MODE) {
- LOGGER.finer("del req slot " + TypeUtil.Global.toString(reqSlot));
+ LOGGER.trace("del req slot " + TypeUtil.Global.toString(reqSlot));
}
reqArenaMgr.deallocate(reqSlot);
}
@@ -494,7 +495,7 @@
// deallocate request
if (DEBUG_MODE) {
- LOGGER.finer("del req slot " + TypeUtil.Global.toString(holder));
+ LOGGER.trace("del req slot " + TypeUtil.Global.toString(holder));
}
reqArenaMgr.deallocate(holder);
// deallocate resource or fix max lock mode
@@ -509,7 +510,7 @@
resArenaMgr.setNext(prev, resArenaMgr.getNext(resource));
}
if (DEBUG_MODE) {
- LOGGER.finer("del res slot " + TypeUtil.Global.toString(resource));
+ LOGGER.trace("del res slot " + TypeUtil.Global.toString(resource));
}
resArenaMgr.deallocate(resource);
} else {
@@ -535,7 +536,7 @@
return;
}
//System.err.println(table.append(new StringBuilder(), true).toString());
- if (LOGGER.isLoggable(LVL)) {
+ if (LOGGER.isEnabled(LVL)) {
LOGGER.log(LVL, "jobArenaMgr " + jobArenaMgr.addTo(new RecordManagerStats()).toString());
LOGGER.log(LVL, "resArenaMgr " + resArenaMgr.addTo(new RecordManagerStats()).toString());
LOGGER.log(LVL, "reqArenaMgr " + reqArenaMgr.addTo(new RecordManagerStats()).toString());
@@ -554,11 +555,11 @@
}
}
if (DEBUG_MODE) {
- LOGGER.finer("del job slot " + TypeUtil.Global.toString(jobSlot));
+ LOGGER.trace("del job slot " + TypeUtil.Global.toString(jobSlot));
}
jobArenaMgr.deallocate(jobSlot);
txnId2TxnSlotMap.remove(txnId);
- stats.logCounters(LOGGER, Level.FINE, true);
+ stats.logCounters(LOGGER, Level.DEBUG, true);
}
private long findOrAllocJobSlot(long txnId) {
@@ -566,7 +567,7 @@
if (jobSlot == null) {
jobSlot = new Long(jobArenaMgr.allocate());
if (DEBUG_MODE) {
- LOGGER.finer("new job slot " + TypeUtil.Global.toString(jobSlot) + " (" + txnId + ")");
+ LOGGER.trace("new job slot " + TypeUtil.Global.toString(jobSlot) + " (" + txnId + ")");
}
jobArenaMgr.setTxnId(jobSlot, txnId);
Long oldSlot = txnId2TxnSlotMap.putIfAbsent(txnId, jobSlot);
@@ -575,7 +576,7 @@
// get(..) and putIfAbsent(..), we'll use that slot and
// deallocate the one we allocated
if (DEBUG_MODE) {
- LOGGER.finer("del job slot " + TypeUtil.Global.toString(jobSlot) + " due to conflict");
+ LOGGER.trace("del job slot " + TypeUtil.Global.toString(jobSlot) + " due to conflict");
}
jobArenaMgr.deallocate(jobSlot);
jobSlot = oldSlot;
@@ -596,12 +597,12 @@
resArenaMgr.setNext(resSlot, group.firstResourceIndex.get());
group.firstResourceIndex.set(resSlot);
if (DEBUG_MODE) {
- LOGGER.finer("new res slot " + TypeUtil.Global.toString(resSlot) + " (" + dsId + ", " + entityHashValue
+ LOGGER.trace("new res slot " + TypeUtil.Global.toString(resSlot) + " (" + dsId + ", " + entityHashValue
+ ")");
}
} else {
if (DEBUG_MODE) {
- LOGGER.finer("fnd res slot " + TypeUtil.Global.toString(resSlot) + " (" + dsId + ", " + entityHashValue
+ LOGGER.trace("fnd res slot " + TypeUtil.Global.toString(resSlot) + " (" + dsId + ", " + entityHashValue
+ ")");
}
}
@@ -614,7 +615,7 @@
reqArenaMgr.setLockMode(reqSlot, lockMode); // lock mode is a byte!!
reqArenaMgr.setJobSlot(reqSlot, jobSlot);
if (DEBUG_MODE) {
- LOGGER.finer("new req slot " + TypeUtil.Global.toString(reqSlot) + " (" + TypeUtil.Global.toString(resSlot)
+ LOGGER.trace("new req slot " + TypeUtil.Global.toString(reqSlot) + " (" + TypeUtil.Global.toString(resSlot)
+ ", " + TypeUtil.Global.toString(jobSlot) + ", " + LockMode.toString(lockMode) + ")");
}
return reqSlot;
@@ -667,7 +668,7 @@
}
private long findResourceInGroup(ResourceGroup group, int dsId, int entityHashValue) {
- stats.logCounters(LOGGER, Level.FINE, false);
+ stats.logCounters(LOGGER, Level.DEBUG, false);
long resSlot = group.firstResourceIndex.get();
while (resSlot != NILL) {
// either we already have a lock on this resource or we have a
@@ -934,7 +935,7 @@
*/
private void log(String string, int id, int entityHashValue, byte lockMode, ITransactionContext txnContext) {
- if (!LOGGER.isLoggable(LVL)) {
+ if (!LOGGER.isEnabled(LVL)) {
return;
}
StringBuilder sb = new StringBuilder();
@@ -980,7 +981,7 @@
group.releaseLatch();
}
} else {
- LOGGER.warning("Could not check locks for " + group);
+ LOGGER.warn("Could not check locks for " + group);
}
}
} catch (InterruptedException e) {
@@ -993,7 +994,7 @@
String msg = "request for " + LockMode.toString(lockMode) + " lock on dataset " + dsId + " entity "
+ entityHashValue + " not found for txn " + txnId + " in thread "
+ Thread.currentThread().getName();
- LOGGER.severe(msg);
+ LOGGER.error(msg);
throw new IllegalStateException(msg);
}
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/LockManagerStats.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/LockManagerStats.java
index 1050d54..4d9c94a5 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/LockManagerStats.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/LockManagerStats.java
@@ -20,8 +20,9 @@
package org.apache.asterix.transaction.management.service.locking;
import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Logger;
final class LockManagerStats {
private final int loggingPeriod;
@@ -67,7 +68,7 @@
}
final void logCounters(final Logger logger, final Level lvl, boolean always) {
- if (logger.isLoggable(lvl)
+ if (logger.isEnabled(lvl)
&& (always || requestSum() % loggingPeriod == 0)) {
logger.log(lvl, toString());
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ResourceGroup.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ResourceGroup.java
index bec4e53..436745d 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ResourceGroup.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ResourceGroup.java
@@ -53,7 +53,7 @@
try {
return latch.writeLock().tryLock(timeout, unit);
} catch (InterruptedException e) {
- ConcurrentLockManager.LOGGER.finer("interrupted while wating on ResourceGroup");
+ ConcurrentLockManager.LOGGER.trace("interrupted while wating on ResourceGroup");
throw e;
}
}
@@ -72,7 +72,7 @@
try {
condition.await();
} catch (InterruptedException e) {
- ConcurrentLockManager.LOGGER.finer("interrupted while waiting on ResourceGroup");
+ ConcurrentLockManager.LOGGER.trace("interrupted while waiting on ResourceGroup");
throw e;
}
}
@@ -83,7 +83,7 @@
}
void log(String s) {
- if (ConcurrentLockManager.LOGGER.isLoggable(ConcurrentLockManager.LVL)) {
+ if (ConcurrentLockManager.LOGGER.isEnabled(ConcurrentLockManager.LVL)) {
ConcurrentLockManager.LOGGER.log(ConcurrentLockManager.LVL, s + " " + toString());
}
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogBuffer.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogBuffer.java
index 96a31c6..dcf8250 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogBuffer.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogBuffer.java
@@ -23,7 +23,6 @@
import java.nio.channels.FileChannel;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.logging.Logger;
import org.apache.asterix.common.context.PrimaryIndexOperationTracker;
import org.apache.asterix.common.exceptions.ACIDException;
@@ -40,11 +39,13 @@
import org.apache.asterix.common.transactions.TxnId;
import org.apache.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants.LockMode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class LogBuffer implements ILogBuffer {
public static final boolean IS_DEBUG_MODE = false;//true
- private static final Logger LOGGER = Logger.getLogger(LogBuffer.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ITransactionSubsystem txnSubsystem;
private final LogBufferTailReader logBufferTailReader;
private final int logPageSize;
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogManager.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogManager.java
index cdd957a..4ce9c71 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogManager.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogManager.java
@@ -35,8 +35,6 @@
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.replication.IReplicationManager;
@@ -53,13 +51,15 @@
import org.apache.asterix.common.transactions.TxnLogFile;
import org.apache.asterix.common.utils.InvokeUtil;
import org.apache.hyracks.api.lifecycle.ILifeCycleComponent;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Logger;
public class LogManager implements ILogManager, ILifeCycleComponent {
/*
* Constants
*/
- private static final Logger LOGGER = Logger.getLogger(LogManager.class.getName());
+ private static final Logger LOGGER = org.apache.logging.log4j.LogManager.getLogger();
private static final long SMALLEST_LOG_FILE_ID = 0;
private static final int INITIAL_LOG_SIZE = 0;
public static final boolean IS_DEBUG_MODE = false;// true
@@ -116,7 +116,7 @@
}
appendLSN.set(initializeLogAnchor(nextLogFileId));
flushLSN.set(appendLSN.get());
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("LogManager starts logging in LSN: " + appendLSN);
}
appendChannel = getFileChannel(appendLSN.get(), false);
@@ -244,7 +244,7 @@
appendLSN.addAndGet(logFileSize - getLogFileOffset(appendLSN.get()));
flushLSN.set(appendLSN.get());
appendChannel = getFileChannel(appendLSN.get(), true);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Created new txn log file with id(" + getLogFileId(appendLSN.get()) + ") starting with LSN = "
+ appendLSN.get());
}
@@ -334,7 +334,7 @@
if (logFileIds == null) {
fileId = nextLogFileId;
createFileIfNotExists(getLogFilePath(fileId));
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("created a log file: " + getLogFilePath(fileId));
}
} else {
@@ -345,18 +345,18 @@
} else {
fileId = nextLogFileId;
createNewDirectory(logDir);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("created the log directory: " + logManagerProperties.getLogDir());
}
createFileIfNotExists(getLogFilePath(fileId));
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("created a log file: " + getLogFilePath(fileId));
}
}
} catch (IOException ioe) {
throw new IllegalStateException("Failed to initialize the log anchor", ioe);
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("log file Id: " + fileId + ", offset: " + offset);
}
return logFileSize * fileId + offset;
@@ -395,7 +395,7 @@
File file = new File(getLogFilePath(id));
file.delete();
txnLogFileId2ReaderCount.remove(id);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Deleted log file " + file.getAbsolutePath());
}
}
@@ -404,20 +404,20 @@
}
private void terminateLogFlusher() {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Terminating LogFlusher thread ...");
}
logFlusher.terminate();
try {
futureLogFlusher.get();
} catch (ExecutionException | InterruptedException e) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("---------- warning(begin): LogFlusher thread is terminated abnormally --------");
e.printStackTrace();
LOGGER.info("---------- warning(end) : LogFlusher thread is terminated abnormally --------");
}
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("LogFlusher thread is terminated.");
}
}
@@ -580,7 +580,7 @@
@Override
public void closeLogFile(TxnLogFile logFileRef, FileChannel fileChannel) throws IOException {
if (!fileChannel.isOpen()) {
- LOGGER.warning(() -> "Closing log file with id(" + logFileRef.getLogFileId() + ") with a closed channel.");
+ LOGGER.warn(() -> "Closing log file with id(" + logFileRef.getLogFileId() + ") with a closed channel.");
}
fileChannel.close();
untouchLogFile(logFileRef.getLogFileId());
@@ -634,7 +634,7 @@
}
class LogFlusher implements Callable<Boolean> {
- private static final Logger LOGGER = Logger.getLogger(LogFlusher.class.getName());
+ private static final Logger LOGGER = org.apache.logging.log4j.LogManager.getLogger();
private static final ILogBuffer POISON_PILL = new LogBuffer(null, ILogRecord.JOB_TERMINATE_LOG_SIZE, null);
private final LogManager logMgr;//for debugging
private final LinkedBlockingQueue<ILogBuffer> emptyQ;
@@ -685,7 +685,7 @@
emptyQ.add(flushPage.getLogPageSize() == logMgr.getLogPageSize() ? flushPage : stashQ.remove());
}
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "LogFlusher is terminating abnormally. System is in unusable state.", e);
+ LOGGER.log(Level.ERROR, "LogFlusher is terminating abnormally. System is in unusable state.", e);
throw e;
} finally {
if (interrupted) {
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogReader.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogReader.java
index f2c5eef..8290e94 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogReader.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogReader.java
@@ -20,7 +20,6 @@
import java.io.IOException;
import java.nio.ByteBuffer;
-import java.util.logging.Logger;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.transactions.ILogManager;
@@ -31,11 +30,13 @@
import org.apache.asterix.common.transactions.MutableLong;
import org.apache.asterix.common.transactions.TxnLogFile;
import org.apache.hyracks.util.annotations.NotThreadSafe;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
@NotThreadSafe
public class LogReader implements ILogReader {
- private static final Logger LOGGER = Logger.getLogger(LogReader.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ILogManager logMgr;
private final long logFileSize;
private final int logPageSize;
@@ -88,7 +89,7 @@
if (readBuffer.position() == readBuffer.limit()) {
boolean hasRemaining = refillLogReadBuffer();
if (!hasRemaining && isRecoveryMode && readLSN < flushLSN.get()) {
- LOGGER.severe("Transaction log ends before expected. Log files may be missing.");
+ LOGGER.error("Transaction log ends before expected. Log files may be missing.");
return null;
}
}
@@ -119,7 +120,7 @@
continue;
}
case BAD_CHKSUM: {
- LOGGER.severe(
+ LOGGER.error(
"Transaction log contains corrupt log records (perhaps due to medium error). Stopping recovery early.");
return null;
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/AbstractCheckpointManager.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/AbstractCheckpointManager.java
index aad2a19..386c1c4 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/AbstractCheckpointManager.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/AbstractCheckpointManager.java
@@ -31,8 +31,6 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.transactions.Checkpoint;
@@ -43,6 +41,9 @@
import org.apache.asterix.common.transactions.ITransactionSubsystem;
import org.apache.asterix.common.utils.StorageConstants;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* An abstract implementation of {@link ICheckpointManager}.
@@ -51,7 +52,7 @@
*/
public abstract class AbstractCheckpointManager implements ICheckpointManager {
- private static final Logger LOGGER = Logger.getLogger(AbstractCheckpointManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final String CHECKPOINT_FILENAME_PREFIX = "checkpoint_";
public static final long SHARP_CHECKPOINT_LSN = -1;
private static final FilenameFilter filter = (File dir, String name) -> name.startsWith(CHECKPOINT_FILENAME_PREFIX);
@@ -65,7 +66,7 @@
public AbstractCheckpointManager(ITransactionSubsystem txnSubsystem, CheckpointProperties checkpointProperties) {
this.txnSubsystem = txnSubsystem;
String checkpointDirPath = checkpointProperties.getCheckpointDirPath();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, "Checkpoint directory = " + checkpointDirPath);
}
if (!checkpointDirPath.endsWith(File.separator)) {
@@ -74,7 +75,7 @@
checkpointDir = new File(checkpointDirPath);
// Create the checkpoint directory if missing
if (!checkpointDir.exists()) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, "Checkpoint directory " + checkpointDirPath + " didn't exist. Creating one");
}
checkpointDir.mkdirs();
@@ -91,38 +92,38 @@
LOGGER.log(Level.INFO, "Getting latest checkpoint");
File[] checkpoints = checkpointDir.listFiles(filter);
if (checkpoints == null || checkpoints.length == 0) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO,
"Listing of files in the checkpoint dir returned " + (checkpoints == null ? "null" : "empty"));
}
return null;
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, "Listing of files in the checkpoint dir returned " + Arrays.toString(checkpoints));
}
List<Checkpoint> checkpointObjectList = new ArrayList<>();
for (File file : checkpoints) {
try {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Reading checkpoint file: " + file.getAbsolutePath());
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "Reading checkpoint file: " + file.getAbsolutePath());
}
String jsonString = new String(Files.readAllBytes(Paths.get(file.getAbsolutePath())));
checkpointObjectList.add(Checkpoint.fromJson(jsonString));
} catch (ClosedByInterruptException e) {
Thread.currentThread().interrupt();
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Interrupted while reading checkpoint file: " + file.getAbsolutePath(),
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "Interrupted while reading checkpoint file: " + file.getAbsolutePath(),
e);
}
throw new ACIDException(e);
} catch (IOException e) {
// ignore corrupted checkpoint file
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Failed to read checkpoint file: " + file.getAbsolutePath(), e);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "Failed to read checkpoint file: " + file.getAbsolutePath(), e);
}
file.delete();
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Deleted corrupted checkpoint file: " + file.getAbsolutePath());
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "Deleted corrupted checkpoint file: " + file.getAbsolutePath());
}
}
}
@@ -132,7 +133,7 @@
* This shouldn't happen unless a hardware corruption happens.
*/
if (checkpointObjectList.isEmpty()) {
- LOGGER.severe("All checkpoint files are corrupted. Forcing recovery from the beginning of the log");
+ LOGGER.error("All checkpoint files are corrupted. Forcing recovery from the beginning of the log");
checkpointObjectList.add(forgeForceRecoveryCheckpoint());
}
@@ -195,7 +196,7 @@
// Get checkpoint file path
Path path = getCheckpointPath(checkpoint.getTimeStamp());
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
File file = path.toFile();
LOGGER.log(Level.INFO, "Persisting checkpoint file to " + file + " which "
+ (file.exists() ? "already exists" : "doesn't exist yet"));
@@ -205,10 +206,10 @@
writer.write(checkpoint.asJson());
writer.flush();
} catch (IOException e) {
- LOGGER.log(Level.SEVERE, "Failed to write checkpoint to disk", e);
+ LOGGER.log(Level.ERROR, "Failed to write checkpoint to disk", e);
throw HyracksDataException.create(e);
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
File file = path.toFile();
LOGGER.log(Level.INFO, "Completed persisting checkpoint file to " + file + " which now "
+ (file.exists() ? "exists" : " still doesn't exist"));
@@ -220,13 +221,11 @@
// Sort the filenames lexicographically to keep the latest checkpoint history files.
Arrays.sort(checkpointFiles);
for (int i = 0; i < checkpointFiles.length - historyToKeep; i++) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Deleting checkpoint file at: " + checkpointFiles[i].getAbsolutePath());
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Deleting checkpoint file at: " + checkpointFiles[i].getAbsolutePath());
}
- if (!checkpointFiles[i].delete()) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Could not delete checkpoint file at: " + checkpointFiles[i].getAbsolutePath());
- }
+ if (!checkpointFiles[i].delete() && LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Could not delete checkpoint file at: " + checkpointFiles[i].getAbsolutePath());
}
}
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/CheckpointManager.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/CheckpointManager.java
index ea711a5..3cb91ff 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/CheckpointManager.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/CheckpointManager.java
@@ -18,13 +18,13 @@
*/
package org.apache.asterix.transaction.management.service.recovery;
-import java.util.logging.Logger;
-
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.transactions.CheckpointProperties;
import org.apache.asterix.common.transactions.ICheckpointManager;
import org.apache.asterix.common.transactions.ITransactionSubsystem;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* An implementation of {@link ICheckpointManager} that defines the logic
@@ -32,7 +32,7 @@
*/
public class CheckpointManager extends AbstractCheckpointManager {
- private static final Logger LOGGER = Logger.getLogger(CheckpointManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public CheckpointManager(ITransactionSubsystem txnSubsystem, CheckpointProperties checkpointProperties) {
super(txnSubsystem, checkpointProperties);
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/CheckpointThread.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/CheckpointThread.java
index 5de76d3..1992057 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/CheckpointThread.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/CheckpointThread.java
@@ -18,12 +18,12 @@
*/
package org.apache.asterix.transaction.management.service.recovery;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.asterix.common.transactions.ICheckpointManager;
import org.apache.asterix.common.transactions.ILogManager;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* A daemon thread that periodically attempts to perform checkpoints.
@@ -32,7 +32,7 @@
*/
public class CheckpointThread extends Thread {
- private static final Logger LOGGER = Logger.getLogger(CheckpointThread.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private long lsnThreshold;
private long checkpointTermInSecs;
@@ -71,7 +71,7 @@
//last checkpoint LSN is considered as the min LSN of the current log partition
lastCheckpointLSN = logManager.getReadableSmallestLSN();
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Error getting smallest readable LSN", e);
+ LOGGER.log(Level.WARN, "Error getting smallest readable LSN", e);
lastCheckpointLSN = 0;
}
}
@@ -95,7 +95,7 @@
lastCheckpointLSN = currentCheckpointAttemptMinLSN;
}
} catch (HyracksDataException e) {
- LOGGER.log(Level.SEVERE, "Error during checkpoint", e);
+ LOGGER.log(Level.ERROR, "Error during checkpoint", e);
}
}
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/ReplicationCheckpointManager.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/ReplicationCheckpointManager.java
index 9f5b83c..4bbcabe 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/ReplicationCheckpointManager.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/ReplicationCheckpointManager.java
@@ -21,7 +21,6 @@
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
-import java.util.logging.Logger;
import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
@@ -34,6 +33,8 @@
import org.apache.asterix.common.transactions.ITransactionSubsystem;
import org.apache.asterix.transaction.management.resource.PersistentLocalResourceRepository;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* An implementation of {@link ICheckpointManager} that defines the logic
@@ -41,7 +42,7 @@
*/
public class ReplicationCheckpointManager extends AbstractCheckpointManager {
- private static final Logger LOGGER = Logger.getLogger(ReplicationCheckpointManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public ReplicationCheckpointManager(ITransactionSubsystem txnSubsystem, CheckpointProperties checkpointProperties) {
super(txnSubsystem, checkpointProperties);
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/TransactionManager.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/TransactionManager.java
index 6b414b8..76ecc63 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/TransactionManager.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/TransactionManager.java
@@ -24,8 +24,6 @@
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.transactions.ITransactionContext;
@@ -37,11 +35,14 @@
import org.apache.asterix.common.utils.TransactionUtil;
import org.apache.hyracks.api.lifecycle.ILifeCycleComponent;
import org.apache.hyracks.util.annotations.ThreadSafe;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
@ThreadSafe
public class TransactionManager implements ITransactionManager, ILifeCycleComponent {
- private static final Logger LOGGER = Logger.getLogger(TransactionManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ITransactionSubsystem txnSubsystem;
private final Map<TxnId, ITransactionContext> txnCtxRepository = new ConcurrentHashMap<>();
private final AtomicLong maxTxnId = new AtomicLong(0);
@@ -83,8 +84,8 @@
txnCtx.setTxnState(ITransactionManager.COMMITTED);
}
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.severe(" caused exception in commit !" + txnCtx.getTxnId());
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.error(" caused exception in commit !" + txnCtx.getTxnId());
}
throw e;
} finally {
@@ -107,8 +108,8 @@
}
} catch (ACIDException e) {
String msg = "Could not complete rollback! System is in an inconsistent state";
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, msg, e);
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.log(Level.ERROR, msg, e);
}
throw new ACIDException(msg, e);
} finally {
@@ -172,7 +173,7 @@
sb.append("\n>>dump_end\t>>----- [ConfVars] -----\n");
os.write(sb.toString().getBytes());
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "exception while dumping state", e);
+ LOGGER.log(Level.WARN, "exception while dumping state", e);
}
}
}
diff --git a/asterixdb/asterix-transactions/src/test/java/org/apache/asterix/transaction/management/service/locking/LockManagerUnitTest.java b/asterixdb/asterix-transactions/src/test/java/org/apache/asterix/transaction/management/service/locking/LockManagerUnitTest.java
index 64ac3cb..817e0f0 100644
--- a/asterixdb/asterix-transactions/src/test/java/org/apache/asterix/transaction/management/service/locking/LockManagerUnitTest.java
+++ b/asterixdb/asterix-transactions/src/test/java/org/apache/asterix/transaction/management/service/locking/LockManagerUnitTest.java
@@ -30,8 +30,6 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.ConsoleHandler;
-import java.util.logging.Logger;
import org.apache.asterix.common.transactions.DatasetId;
import org.apache.asterix.common.transactions.ILockManager;
@@ -53,10 +51,6 @@
static long COORDINATOR_SLEEP = 20;
static int TIMEOUT_MS = 100;
- static {
- Logger.getLogger(ConcurrentLockManager.class.getName()).addHandler(new ConsoleHandler());
- }
-
Map<Integer, ITransactionContext> jobId2TxnCtxMap;
ILockManager lockMgr;
diff --git a/asterixdb/pom.xml b/asterixdb/pom.xml
index 234a766..0308d22 100644
--- a/asterixdb/pom.xml
+++ b/asterixdb/pom.xml
@@ -94,6 +94,7 @@
-enableassertions -Xmx${test.heap.size}m
-Dfile.encoding=UTF-8
-Djava.util.logging.config.file=${user.home}/logging.properties
+ -Dlog4j.configurationFile=${basedir}/../asterix-app/src/test/resources/log4j2-test.xml
-DrunSlowAQLTests=${runSlowAQLTests}
-Xdebug
-Xrunjdwp:transport=dt_socket,server=y,address=8000,suspend=${debug.suspend.flag}
diff --git a/hyracks-fullstack/algebricks/algebricks-compiler/pom.xml b/hyracks-fullstack/algebricks/algebricks-compiler/pom.xml
index 04f57cc..706e2ce 100644
--- a/hyracks-fullstack/algebricks/algebricks-compiler/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-compiler/pom.xml
@@ -90,10 +90,14 @@
<version>1.6.6</version>
<scope>test</scope>
</dependency>
- <dependency>
+ <dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
- </dependency>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/algebricks/algebricks-compiler/src/main/java/org/apache/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java b/hyracks-fullstack/algebricks/algebricks-compiler/src/main/java/org/apache/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java
index 311aa43..79b8f38 100644
--- a/hyracks-fullstack/algebricks/algebricks-compiler/src/main/java/org/apache/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java
+++ b/hyracks-fullstack/algebricks/algebricks-compiler/src/main/java/org/apache/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java
@@ -93,7 +93,7 @@
@Override
public JobSpecification createJob(Object appContext,
IJobletEventListenerFactory jobEventListenerFactory) throws AlgebricksException {
- AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Starting Job Generation.\n");
+ AlgebricksConfig.ALGEBRICKS_LOGGER.debug("Starting Job Generation.\n");
JobGenContext context = new JobGenContext(null, metadata, appContext,
serializerDeserializerProvider, hashFunctionFactoryProvider, hashFunctionFamilyProvider,
comparatorFactoryProvider, typeTraitProvider, binaryBooleanInspectorFactory,
diff --git a/hyracks-fullstack/algebricks/algebricks-core/pom.xml b/hyracks-fullstack/algebricks/algebricks-core/pom.xml
index dd135c6..b8666c4 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-core/pom.xml
@@ -81,5 +81,9 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IntersectOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IntersectOperator.java
index 114fde0..0595ff8 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IntersectOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IntersectOperator.java
@@ -155,7 +155,7 @@
Object expectedType = expected.getVarType(expectedVariables.get(i));
Object actualType = actual.getVarType(actualVariables.get(i));
if (!expectedType.equals(actualType)) {
- AlgebricksConfig.ALGEBRICKS_LOGGER.warning(
+ AlgebricksConfig.ALGEBRICKS_LOGGER.warn(
"Type of two variables are not equal." + expectedVariables.get(i) + " is of type: "
+ expectedType + actualVariables.get(i) + " is of type: " + actualType);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
index f800be8..39d522f 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
@@ -315,7 +315,7 @@
}
if (changed) {
AlgebricksConfig.ALGEBRICKS_LOGGER
- .fine(">>>> Group-by list changed from " + GroupByOperator.veListToString(gByList) + " to "
+ .debug(">>>> Group-by list changed from " + GroupByOperator.veListToString(gByList) + " to "
+ GroupByOperator.veListToString(newGbyList) + ".\n");
}
gByList.clear();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
index bdabbca..5d9e2dc 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
@@ -81,7 +81,7 @@
}
op.accept(visitor, context);
if (AlgebricksConfig.DEBUG) {
- AlgebricksConfig.ALGEBRICKS_LOGGER.finest(
+ AlgebricksConfig.ALGEBRICKS_LOGGER.trace(
"Logical properties visitor for " + op + ": " + context.getLogicalPropertiesVector(op) + "\n");
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
index ee0bec7..83591ee 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
@@ -21,7 +21,6 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
-import java.util.logging.Logger;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
@@ -58,6 +57,8 @@
import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
import org.apache.hyracks.dataflow.std.join.HybridHashJoinOperatorDescriptor;
import org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class HybridHashJoinPOperator extends AbstractHashJoinPOperator {
@@ -67,7 +68,7 @@
private final int aveRecordsPerFrame;
private final double fudgeFactor;
- private static final Logger LOGGER = Logger.getLogger(HybridHashJoinPOperator.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public HybridHashJoinPOperator(JoinKind kind, JoinPartitioningType partitioningType,
List<LogicalVariable> sideLeftOfEqualities, List<LogicalVariable> sideRightOfEqualities,
@@ -78,7 +79,7 @@
this.aveRecordsPerFrame = aveRecordsPerFrame;
this.fudgeFactor = fudgeFactor;
- LOGGER.fine("HybridHashJoinPOperator constructed with: JoinKind=" + kind + ", JoinPartitioningType="
+ LOGGER.debug("HybridHashJoinPOperator constructed with: JoinKind=" + kind + ", JoinPartitioningType="
+ partitioningType + ", List<LogicalVariable>=" + sideLeftOfEqualities + ", List<LogicalVariable>="
+ sideRightOfEqualities + ", int memSizeInFrames=" + memSizeInFrames + ", int maxInputSize0InFrames="
+ maxInputSizeInFrames + ", int aveRecordsPerFrame=" + aveRecordsPerFrame + ", double fudgeFactor="
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/config/AlgebricksConfig.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/config/AlgebricksConfig.java
index 7df8cc4..c63e8a1 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/config/AlgebricksConfig.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/config/AlgebricksConfig.java
@@ -18,10 +18,11 @@
*/
package org.apache.hyracks.algebricks.core.config;
-import java.util.logging.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class AlgebricksConfig {
public static final boolean DEBUG = true;
public static final String ALGEBRICKS_LOGGER_NAME = "org.apache.hyracks.algebricks";
- public static final Logger ALGEBRICKS_LOGGER = Logger.getLogger(ALGEBRICKS_LOGGER_NAME);
+ public static final Logger ALGEBRICKS_LOGGER = LogManager.getLogger(ALGEBRICKS_LOGGER_NAME);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
index 2c2708b..947bac1 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
@@ -20,7 +20,6 @@
import java.util.Collection;
import java.util.List;
-import java.util.logging.Logger;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
@@ -42,10 +41,12 @@
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public final class JobGenHelper {
- private static final Logger LOGGER = Logger.getLogger(JobGenHelper.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
@SuppressWarnings("rawtypes")
public static RecordDescriptor mkRecordDescriptor(IVariableTypeEnvironment env, IOperatorSchema opSchema,
@@ -58,7 +59,7 @@
for (LogicalVariable var : opSchema) {
Object t = env.getVarType(var);
if (t == null) {
- LOGGER.warning("No type for variable " + var);
+ LOGGER.warn("No type for variable " + var);
}
fields[i] = sdp.getSerializerDeserializer(t);
typeTraits[i] = ttp.getTypeTrait(t);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/AbstractRuleController.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/AbstractRuleController.java
index cfb3db1..cad62c4 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/AbstractRuleController.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/AbstractRuleController.java
@@ -66,7 +66,7 @@
}
private String getPlanString(Mutable<ILogicalOperator> opRef) throws AlgebricksException {
- if (AlgebricksConfig.ALGEBRICKS_LOGGER.isLoggable(Level.FINE) && context != null) {
+ if (AlgebricksConfig.ALGEBRICKS_LOGGER.isDebugEnabled() && context != null) {
LogicalOperatorPrettyPrintVisitor pvisitor = context.getPrettyPrintVisitor();
pvisitor.reset(new AlgebricksAppendable());
PlanPrettyPrinter.printOperator((AbstractLogicalOperator) opRef.getValue(), pvisitor, 0);
@@ -77,10 +77,10 @@
private void printRuleApplication(IAlgebraicRewriteRule rule, String beforePlan, String afterPlan)
throws AlgebricksException {
- if (AlgebricksConfig.ALGEBRICKS_LOGGER.isLoggable(Level.FINE)) {
- AlgebricksConfig.ALGEBRICKS_LOGGER.fine(">>>> Rule " + rule.getClass() + " fired.\n");
- AlgebricksConfig.ALGEBRICKS_LOGGER.fine(">>>> Before plan\n" + beforePlan + "\n");
- AlgebricksConfig.ALGEBRICKS_LOGGER.fine(">>>> After plan\n" + afterPlan + "\n");
+ if (AlgebricksConfig.ALGEBRICKS_LOGGER.isDebugEnabled()) {
+ AlgebricksConfig.ALGEBRICKS_LOGGER.debug(">>>> Rule " + rule.getClass() + " fired.\n");
+ AlgebricksConfig.ALGEBRICKS_LOGGER.debug(">>>> Before plan\n" + beforePlan + "\n");
+ AlgebricksConfig.ALGEBRICKS_LOGGER.debug(">>>> After plan\n" + afterPlan + "\n");
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java
index f1fdec6..0a4b298 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java
@@ -19,7 +19,6 @@
package org.apache.hyracks.algebricks.core.rewriter.base;
import java.util.List;
-import java.util.logging.Level;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -34,6 +33,7 @@
import org.apache.hyracks.algebricks.core.algebra.prettyprint.LogicalOperatorPrettyPrintVisitor;
import org.apache.hyracks.algebricks.core.algebra.prettyprint.PlanPrettyPrinter;
import org.apache.hyracks.algebricks.core.config.AlgebricksConfig;
+import org.apache.logging.log4j.Level;
public class HeuristicOptimizer {
@@ -75,18 +75,18 @@
return;
}
if (AlgebricksConfig.DEBUG) {
- AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Starting logical optimizations.\n");
+ AlgebricksConfig.ALGEBRICKS_LOGGER.debug("Starting logical optimizations.\n");
}
- logPlanAt("Logical Plan", Level.FINE);
+ logPlanAt("Logical Plan", Level.DEBUG);
runOptimizationSets(plan, logicalRewrites);
computeSchemaBottomUpForPlan(plan);
runPhysicalOptimizations(plan, physicalRewrites);
- logPlanAt("Optimized Plan", Level.FINE);
+ logPlanAt("Optimized Plan", Level.DEBUG);
}
private void logPlanAt(String name, Level lvl) throws AlgebricksException {
- if (AlgebricksConfig.ALGEBRICKS_LOGGER.isLoggable(lvl)) {
+ if (AlgebricksConfig.ALGEBRICKS_LOGGER.isEnabled(lvl)) {
final LogicalOperatorPrettyPrintVisitor pvisitor = context.getPrettyPrintVisitor();
pvisitor.reset(new AlgebricksAppendable());
PlanPrettyPrinter.printPlan(plan, pvisitor, 0);
@@ -127,7 +127,7 @@
List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> physicalRewrites)
throws AlgebricksException {
if (AlgebricksConfig.DEBUG) {
- AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Starting physical optimizations.\n");
+ AlgebricksConfig.ALGEBRICKS_LOGGER.debug("Starting physical optimizations.\n");
}
// PhysicalOptimizationsUtil.computeFDsAndEquivalenceClasses(plan);
runOptimizationSets(plan, physicalRewrites);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml b/hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml
index 192dfe1..b9653af 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml
@@ -56,5 +56,9 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
\ No newline at end of file
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java
index a1730ac7..6763e2b 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java
@@ -123,7 +123,7 @@
// somewhere else, too.
physicalOptimizationConfig = context.getPhysicalOptimizationConfig();
- AlgebricksConfig.ALGEBRICKS_LOGGER.fine(">>>> Optimizing operator " + op.getPhysicalOperator() + ".\n");
+ AlgebricksConfig.ALGEBRICKS_LOGGER.debug(">>>> Optimizing operator " + op.getPhysicalOperator() + ".\n");
PhysicalOptimizationsUtil.computeFDsAndEquivalenceClasses(op, context);
@@ -132,7 +132,7 @@
new LinkedList<ILocalStructuralProperty>());
boolean changed = physOptimizeOp(opRef, pvector, false, context);
op.computeDeliveredPhysicalProperties(context);
- AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> Structural properties for " + op.getPhysicalOperator() + ": "
+ AlgebricksConfig.ALGEBRICKS_LOGGER.trace(">>>> Structural properties for " + op.getPhysicalOperator() + ": "
+ op.getDeliveredPhysicalProperties() + "\n");
context.addToDontApplySet(this, opRef.getValue());
@@ -149,7 +149,7 @@
}
AbstractLogicalOperator op = (AbstractLogicalOperator) root.getValue();
op.computeDeliveredPhysicalProperties(context);
- AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> Structural properties for " + op.getPhysicalOperator()
+ AlgebricksConfig.ALGEBRICKS_LOGGER.trace(">>>> Structural properties for " + op.getPhysicalOperator()
+ ": " + op.getDeliveredPhysicalProperties() + "\n");
}
return changed;
@@ -250,7 +250,7 @@
IPhysicalPropertiesVector delivered = child.getDeliveredPhysicalProperties();
AlgebricksConfig.ALGEBRICKS_LOGGER
- .finest(">>>> Properties delivered by " + child.getPhysicalOperator() + ": " + delivered + "\n");
+ .trace(">>>> Properties delivered by " + child.getPhysicalOperator() + ": " + delivered + "\n");
IPartitioningRequirementsCoordinator prc = pr.getPartitioningCoordinator();
// Coordinates requirements by looking at the firstDeliveredPartitioning.
Pair<Boolean, IPartitioningProperty> pbpp = prc.coordinateRequirements(
@@ -260,7 +260,7 @@
new StructuralPropertiesVector(pbpp.second, requiredProperty.getLocalProperties());
AlgebricksConfig.ALGEBRICKS_LOGGER
- .finest(">>>> Required properties for " + child.getPhysicalOperator() + ": " + rqd + "\n");
+ .trace(">>>> Required properties for " + child.getPhysicalOperator() + ": " + rqd + "\n");
// The partitioning property of reqdProperties[childIndex] could be updated here because
// rqd.getPartitioningProperty() is the same object instance as requiredProperty.getPartitioningProperty().
IPhysicalPropertiesVector diff = delivered.getUnsatisfiedPropertiesFrom(rqd,
@@ -280,7 +280,7 @@
delivered = newChild.getDeliveredPhysicalProperties();
IPhysicalPropertiesVector newDiff =
newPropertiesDiff(newChild, rqd, mayExpandPartitioningProperties, context);
- AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> New properties diff: " + newDiff + "\n");
+ AlgebricksConfig.ALGEBRICKS_LOGGER.trace(">>>> New properties diff: " + newDiff + "\n");
if (isRedundantSort(opRef, delivered, newDiff, context)) {
opIsRedundantSort = true;
@@ -306,7 +306,7 @@
if (opIsRedundantSort) {
if (AlgebricksConfig.DEBUG) {
AlgebricksConfig.ALGEBRICKS_LOGGER
- .fine(">>>> Removing redundant SORT operator " + op.getPhysicalOperator() + "\n");
+ .trace(">>>> Removing redundant SORT operator " + op.getPhysicalOperator() + "\n");
printOp(op);
}
changed = true;
@@ -340,7 +340,7 @@
newChildEqClasses = context.getEquivalenceClassMap(newChild);
newChildFDs = context.getFDList(newChild);
}
- AlgebricksConfig.ALGEBRICKS_LOGGER.finest(
+ AlgebricksConfig.ALGEBRICKS_LOGGER.trace(
">>>> Required properties for new op. " + newChild.getPhysicalOperator() + ": " + required + "\n");
return newDelivered.getUnsatisfiedPropertiesFrom(required, mayExpandPartitioningProperties, newChildEqClasses,
@@ -446,7 +446,7 @@
}
AbstractLogicalOperator newChild = (AbstractLogicalOperator) op.getInputs().get(childIndex).getValue();
IPhysicalPropertiesVector newDiff = newPropertiesDiff(newChild, required, true, context);
- AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> New properties diff: " + newDiff + "\n");
+ AlgebricksConfig.ALGEBRICKS_LOGGER.trace(">>>> New properties diff: " + newDiff + "\n");
if (newDiff != null) {
addLocalEnforcers(op, childIndex, newDiff.getLocalProperties(), nestedPlan, context);
}
@@ -457,7 +457,7 @@
boolean nestedPlan, IOptimizationContext context) throws AlgebricksException {
if (AlgebricksConfig.DEBUG) {
AlgebricksConfig.ALGEBRICKS_LOGGER
- .fine(">>>> Adding local enforcers for local props = " + localProperties + "\n");
+ .trace(">>>> Adding local enforcers for local props = " + localProperties + "\n");
}
if (localProperties == null || localProperties.isEmpty()) {
@@ -524,7 +524,7 @@
oo.getInputs().add(topOp);
context.computeAndSetTypeEnvironmentForOperator(oo);
if (AlgebricksConfig.DEBUG) {
- AlgebricksConfig.ALGEBRICKS_LOGGER.fine(">>>> Added sort enforcer " + oo.getPhysicalOperator() + ".\n");
+ AlgebricksConfig.ALGEBRICKS_LOGGER.trace(">>>> Added sort enforcer " + oo.getPhysicalOperator() + ".\n");
}
return new MutableObject<ILogicalOperator>(oo);
}
@@ -608,7 +608,7 @@
context.computeAndSetTypeEnvironmentForOperator(exchg);
if (AlgebricksConfig.DEBUG) {
AlgebricksConfig.ALGEBRICKS_LOGGER
- .fine(">>>> Added partitioning enforcer " + exchg.getPhysicalOperator() + ".\n");
+ .debug(">>>> Added partitioning enforcer " + exchg.getPhysicalOperator() + ".\n");
printOp((AbstractLogicalOperator) op);
}
}
@@ -626,7 +626,7 @@
private void printOp(AbstractLogicalOperator op) throws AlgebricksException {
LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();
PlanPrettyPrinter.printOperator(op, pvisitor, 0);
- AlgebricksConfig.ALGEBRICKS_LOGGER.fine(pvisitor.get().toString());
+ AlgebricksConfig.ALGEBRICKS_LOGGER.debug(pvisitor.get().toString());
}
private List<OrderColumn> computeOrderColumns(IPhysicalPropertiesVector pv) {
@@ -655,7 +655,7 @@
newOp.recomputeSchema();
newOp.computeDeliveredPhysicalProperties(context);
context.computeAndSetTypeEnvironmentForOperator(newOp);
- AlgebricksConfig.ALGEBRICKS_LOGGER.finest(">>>> Structural properties for " + newOp.getPhysicalOperator() + ": "
+ AlgebricksConfig.ALGEBRICKS_LOGGER.trace(">>>> Structural properties for " + newOp.getPhysicalOperator() + ": "
+ newOp.getDeliveredPhysicalProperties() + "\n");
PhysicalOptimizationsUtil.computeFDsAndEquivalenceClasses(newOp, context);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java
index 836f266..942f181 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java
@@ -201,7 +201,7 @@
//retain the intersection
pkVars.retainAll(producedVars);
}
- AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Found FD for introducing group-by: " + pkVars);
+ AlgebricksConfig.ALGEBRICKS_LOGGER.debug("Found FD for introducing group-by: " + pkVars);
Mutable<ILogicalOperator> rightRef = join.getInputs().get(1);
LogicalVariable testForNull = null;
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/util/JoinUtils.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/util/JoinUtils.java
index 652f062..6efda52 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/util/JoinUtils.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/util/JoinUtils.java
@@ -107,13 +107,13 @@
LogicalPropertiesVisitor.computeLogicalPropertiesDFS(opBuild, context);
ILogicalPropertiesVector v = context.getLogicalPropertiesVector(opBuild);
AlgebricksConfig.ALGEBRICKS_LOGGER
- .fine("// HybridHashJoin inner branch -- Logical properties for " + opBuild + ": " + v + "\n");
+ .debug("// HybridHashJoin inner branch -- Logical properties for " + opBuild + ": " + v + "\n");
if (v != null) {
int size2 = v.getMaxOutputFrames();
HybridHashJoinPOperator hhj = (HybridHashJoinPOperator) op.getPhysicalOperator();
if (size2 > 0 && size2 * hhj.getFudgeFactor() <= hhj.getMemSizeInFrames()) {
AlgebricksConfig.ALGEBRICKS_LOGGER
- .fine("// HybridHashJoin inner branch " + opBuild + " fits in memory\n");
+ .debug("// HybridHashJoin inner branch " + opBuild + " fits in memory\n");
// maintains the local properties on the probe side
op.setPhysicalOperator(
new InMemoryHashJoinPOperator(hhj.getKind(), hhj.getPartitioningType(), hhj.getKeysLeftBranch(),
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/pom.xml b/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
index 357b50c..a835569 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
@@ -181,5 +181,9 @@
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-api/pom.xml b/hyracks-fullstack/hyracks/hyracks-api/pom.xml
index 5e3ccf7..71bf5fc 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-api/pom.xml
@@ -104,5 +104,9 @@
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
index 0b2cc9b..cd6362f 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
@@ -24,8 +24,6 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.dataflow.ActivityId;
@@ -35,9 +33,11 @@
import org.apache.hyracks.api.job.ActivityClusterGraph;
import org.apache.hyracks.api.job.ActivityClusterId;
import org.apache.hyracks.api.job.JobActivityGraph;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ActivityClusterGraphBuilder {
- private static final Logger LOGGER = Logger.getLogger(ActivityClusterGraphBuilder.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public ActivityClusterGraphBuilder() {
}
@@ -146,8 +146,8 @@
}
acg.addActivityClusters(acList);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine(acg.toJSON().asText());
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug(acg.toJSON().asText());
}
return acg;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
index 084626e..64bcf6e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
@@ -24,8 +24,6 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.dataflow.ActivityId;
@@ -37,9 +35,11 @@
import org.apache.hyracks.api.job.JobActivityGraph;
import org.apache.hyracks.api.job.JobFlag;
import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class JobActivityGraphBuilder implements IActivityGraphBuilder {
- private static final Logger LOGGER = Logger.getLogger(JobActivityGraphBuilder.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final Map<ActivityId, IOperatorDescriptor> activityOperatorMap;
@@ -71,8 +71,8 @@
@Override
public void addSourceEdge(int operatorInputIndex, IActivity task, int taskInputIndex) {
- if (LOGGER.isLoggable(Level.FINEST)) {
- LOGGER.finest("Adding source edge: " + task.getActivityId() + ":" + operatorInputIndex + " -> "
+ if (LOGGER.isTraceEnabled()) {
+ LOGGER.trace("Adding source edge: " + task.getActivityId() + ":" + operatorInputIndex + " -> "
+ task.getActivityId() + ":" + taskInputIndex);
}
IOperatorDescriptor op = activityOperatorMap.get(task.getActivityId());
@@ -83,8 +83,8 @@
@Override
public void addTargetEdge(int operatorOutputIndex, IActivity task, int taskOutputIndex) {
- if (LOGGER.isLoggable(Level.FINEST)) {
- LOGGER.finest("Adding target edge: " + task.getActivityId() + ":" + operatorOutputIndex + " -> "
+ if (LOGGER.isTraceEnabled()) {
+ LOGGER.trace("Adding target edge: " + task.getActivityId() + ":" + operatorOutputIndex + " -> "
+ task.getActivityId() + ":" + taskOutputIndex);
}
IOperatorDescriptor op = activityOperatorMap.get(task.getActivityId());
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
index ddcaab3..80ff77c 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
@@ -22,7 +22,8 @@
import java.util.List;
import java.util.Set;
import java.util.function.Predicate;
-import java.util.logging.Level;
+
+import org.apache.logging.log4j.Level;
/**
* Accessor for the data contained in the global application configuration file.
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksDataException.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksDataException.java
index 7ae7cbf..58b4b27 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksDataException.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/HyracksDataException.java
@@ -20,10 +20,11 @@
package org.apache.hyracks.api.exceptions;
import java.io.Serializable;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.util.ErrorMessageUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* The main execution time exception type for runtime errors in a hyracks environment
@@ -31,7 +32,7 @@
public class HyracksDataException extends HyracksException {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(HyracksDataException.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static HyracksDataException create(Throwable cause) {
if (cause instanceof HyracksDataException || cause == null) {
@@ -41,7 +42,7 @@
throw (Error) cause;
} else if (cause instanceof InterruptedException && !Thread.currentThread().isInterrupted()) {
// TODO(mblow): why not force interrupt on current thread?
- LOGGER.log(Level.WARNING,
+ LOGGER.log(Level.WARN,
"Wrapping an InterruptedException in HyracksDataException and current thread is not interrupted",
cause);
}
@@ -66,7 +67,7 @@
throw (Error) th;
} else if (th instanceof InterruptedException && !Thread.currentThread().isInterrupted()) {
// TODO(mblow): why not force interrupt on current thread?
- LOGGER.log(Level.WARNING, "Suppressing an InterruptedException in a HyracksDataException and current "
+ LOGGER.log(Level.WARN, "Suppressing an InterruptedException in a HyracksDataException and current "
+ "thread is not interrupted", th);
}
root.addSuppressed(th);
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java
index f5b4417..6d5d246 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java
@@ -25,10 +25,11 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.util.ExitUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class LifeCycleComponentManager implements ILifeCycleComponentManager {
@@ -36,7 +37,7 @@
public static final String DUMP_PATH_KEY = "DUMP_PATH";
}
- private static final Logger LOGGER = Logger.getLogger(LifeCycleComponentManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final List<ILifeCycleComponent> components;
private boolean stopInitiated;
@@ -54,7 +55,7 @@
@Override
public void uncaughtException(Thread t, Throwable e) {
try {
- LOGGER.log(Level.SEVERE, "Uncaught Exception from thread " + t.getName() + ". Calling shutdown hook", e);
+ LOGGER.log(Level.ERROR, "Uncaught Exception from thread " + t.getName() + ". Calling shutdown hook", e);
} finally {
ExitUtil.exit(99);
}
@@ -79,7 +80,7 @@
return;
}
stopped = true;
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Attempting to stop " + this);
}
if (stopInitiated) {
@@ -87,14 +88,14 @@
return;
}
if (!configured) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.severe("Lifecycle management not configured " + this);
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.error("Lifecycle management not configured " + this);
}
return;
}
stopInitiated = true;
- LOGGER.severe("Stopping instance");
+ LOGGER.error("Stopping instance");
FileOutputStream componentDumpStream = null;
String componentDumpPath = null;
@@ -110,13 +111,13 @@
}
componentDumpStream = new FileOutputStream(f);
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Stopping component instance " + component.getClass().getName() + "; dump state: "
+ dumpState + ", dump path: " + componentDumpPath);
}
component.stop(dumpState, componentDumpStream);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Exception in stopping component " + component.getClass().getName(), e);
+ LOGGER.log(Level.ERROR, "Exception in stopping component " + component.getClass().getName(), e);
} finally {
if (componentDumpStream != null) {
componentDumpStream.close();
@@ -132,11 +133,11 @@
dumpPath = configuration.get(Config.DUMP_PATH_KEY);
if (dumpPath == null) {
dumpPath = System.getProperty("user.dir");
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("dump path not configured. Using current directory " + dumpPath);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("dump path not configured. Using current directory " + dumpPath);
}
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("LifecycleComponentManager configured " + this);
}
configured = true;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
index e9491f3..26ce2c1 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
@@ -26,12 +26,14 @@
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ErrorMessageUtil {
- private static final Logger LOGGER = Logger.getLogger(ErrorMessageUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final String NONE = "";
private static final String COMMA = ",";
@@ -99,7 +101,7 @@
return fmt.out().toString();
} catch (Exception e) {
// Do not throw further exceptions during exception processing.
- LOGGER.log(Level.WARNING, e.getLocalizedMessage(), e);
+ LOGGER.log(Level.WARN, e.getLocalizedMessage(), e);
return e.getMessage();
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SingleThreadEventProcessor.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SingleThreadEventProcessor.java
index 21965c7..9eb3b8e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SingleThreadEventProcessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SingleThreadEventProcessor.java
@@ -19,15 +19,16 @@
package org.apache.hyracks.api.util;
import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public abstract class SingleThreadEventProcessor<T> implements Runnable {
- private static final Logger LOGGER = Logger.getLogger(SingleThreadEventProcessor.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final String name;
private final LinkedBlockingQueue<T> eventInbox;
private volatile Thread executorThread;
@@ -50,10 +51,10 @@
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Error handling an event", e);
+ LOGGER.log(Level.ERROR, "Error handling an event", e);
}
}
- LOGGER.log(Level.WARNING, "Stopped " + Thread.currentThread().getName());
+ LOGGER.log(Level.WARN, "Stopped " + Thread.currentThread().getName());
}
protected abstract void handle(T event) throws Exception; //NOSONAR
@@ -71,7 +72,7 @@
int attempt = 0;
while (executorThread.isAlive()) {
attempt++;
- LOGGER.log(Level.WARNING,
+ LOGGER.log(Level.WARN,
"Failed to stop event processor after " + attempt + " attempts. Interrupted exception swallowed?");
if (attempt == 10) {
throw HyracksDataException.create(ErrorCode.FAILED_TO_SHUTDOWN_EVENT_PROCESSOR, name);
diff --git a/hyracks-fullstack/hyracks/hyracks-client/pom.xml b/hyracks-fullstack/hyracks/hyracks-client/pom.xml
index 81c9963..16039ab 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-client/pom.xml
@@ -109,5 +109,9 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java
index 31fd379..fc5708d 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java
@@ -25,8 +25,6 @@
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.channels.IInputChannel;
import org.apache.hyracks.api.comm.FrameHelper;
@@ -45,10 +43,13 @@
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.client.net.ClientNetworkManager;
import org.apache.hyracks.comm.channels.DatasetNetworkInputChannel;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
// TODO(madhusudancs): Should this implementation be moved to org.apache.hyracks.client?
public class HyracksDatasetReader implements IHyracksDatasetReader {
- private static final Logger LOGGER = Logger.getLogger(HyracksDatasetReader.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final IHyracksDatasetDirectoryServiceConnection datasetDirectoryServiceConnection;
@@ -94,10 +95,10 @@
return datasetDirectoryServiceConnection.getDatasetResultStatus(jobId, resultSetId);
} catch (HyracksDataException e) {
if (e.getErrorCode() != ErrorCode.NO_RESULT_SET) {
- LOGGER.log(Level.WARNING, "Exception retrieving result set for job " + jobId, e);
+ LOGGER.log(Level.WARN, "Exception retrieving result set for job " + jobId, e);
}
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Exception retrieving result set for job " + jobId, e);
+ LOGGER.log(Level.WARN, "Exception retrieving result set for job " + jobId, e);
}
return null;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-comm/pom.xml b/hyracks-fullstack/hyracks/hyracks-comm/pom.xml
index 3ca803c..db36094 100644
--- a/hyracks-fullstack/hyracks/hyracks-comm/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-comm/pom.xml
@@ -51,5 +51,9 @@
<artifactId>hyracks-net</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/DatasetNetworkInputChannel.java b/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/DatasetNetworkInputChannel.java
index e3c6f4a..c334389 100644
--- a/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/DatasetNetworkInputChannel.java
+++ b/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/DatasetNetworkInputChannel.java
@@ -22,8 +22,6 @@
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Queue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.channels.IInputChannel;
import org.apache.hyracks.api.channels.IInputChannelMonitor;
@@ -34,9 +32,11 @@
import org.apache.hyracks.api.dataset.ResultSetId;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.JobId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class DatasetNetworkInputChannel implements IInputChannel {
- private static final Logger LOGGER = Logger.getLogger(DatasetNetworkInputChannel.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
static final int INITIAL_MESSAGE_SIZE = 20;
@@ -114,8 +114,8 @@
writeBuffer.putLong(resultSetId.getId());
writeBuffer.putInt(partition);
writeBuffer.flip();
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Sending partition request for JobId: " + jobId + " partition: " + partition + " on channel: "
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Sending partition request for JobId: " + jobId + " partition: " + partition + " on channel: "
+ ccb);
}
ccb.getWriteInterface().getFullBufferAcceptor().accept(writeBuffer);
diff --git a/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/NetworkInputChannel.java b/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/NetworkInputChannel.java
index a846da3..0a9342a 100644
--- a/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/NetworkInputChannel.java
+++ b/hyracks-fullstack/hyracks/hyracks-comm/src/main/java/org/apache/hyracks/comm/channels/NetworkInputChannel.java
@@ -22,8 +22,6 @@
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Queue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.channels.IInputChannel;
import org.apache.hyracks.api.channels.IInputChannelMonitor;
@@ -33,9 +31,11 @@
import org.apache.hyracks.api.context.IHyracksCommonContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.partitions.PartitionId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class NetworkInputChannel implements IInputChannel {
- private static final Logger LOGGER = Logger.getLogger(NetworkInputChannel.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
static final int INITIAL_MESSAGE_SIZE = 20;
@@ -107,8 +107,8 @@
writeBuffer.putInt(partitionId.getSenderIndex());
writeBuffer.putInt(partitionId.getReceiverIndex());
writeBuffer.flip();
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Sending partition request: " + partitionId + " on channel: " + ccb);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Sending partition request: " + partitionId + " on channel: " + ccb);
}
ccb.getWriteInterface().getFullBufferAcceptor().accept(writeBuffer);
ccb.getWriteInterface().getFullBufferAcceptor().close();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
index fce37dd..763c5ac 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
@@ -93,5 +93,13 @@
<version>2.0.2-beta</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-core</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/BaseCCApplication.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/BaseCCApplication.java
index b2478a3..a7a64cc 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/BaseCCApplication.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/BaseCCApplication.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.control.cc;
import java.util.Arrays;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.application.ICCApplication;
import org.apache.hyracks.api.application.IServiceContext;
@@ -31,9 +29,13 @@
import org.apache.hyracks.control.common.controllers.CCConfig;
import org.apache.hyracks.control.common.controllers.ControllerConfig;
import org.apache.hyracks.control.common.controllers.NCConfig;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.config.Configurator;
public class BaseCCApplication implements ICCApplication {
- private static final Logger LOGGER = Logger.getLogger(BaseCCApplication.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final ICCApplication INSTANCE = new BaseCCApplication();
private IConfigManager configManager;
@@ -83,7 +85,7 @@
protected void configureLoggingLevel(Level level) {
LOGGER.info("Setting Hyracks log level to " + level);
- Logger.getLogger("org.apache.hyracks").setLevel(level);
+ Configurator.setLevel("org.apache.hyracks", level);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java
index a78f6bb..a188594 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java
@@ -22,17 +22,18 @@
import java.io.IOException;
import java.util.Arrays;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.application.ICCApplication;
import org.apache.hyracks.control.common.config.ConfigManager;
import org.apache.hyracks.control.common.config.ConfigUtils;
import org.apache.hyracks.control.common.controllers.CCConfig;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.kohsuke.args4j.CmdLineException;
public class CCDriver {
- private static final Logger LOGGER = Logger.getLogger(CCDriver.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private CCDriver() {
}
@@ -49,10 +50,10 @@
Thread.sleep(100000);
}
} catch (CmdLineException e) {
- LOGGER.log(Level.FINE, "Exception parsing command line: " + Arrays.toString(args), e);
+ LOGGER.log(Level.DEBUG, "Exception parsing command line: " + Arrays.toString(args), e);
System.exit(2);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Exiting CCDriver due to exception", e);
+ LOGGER.log(Level.ERROR, "Exiting CCDriver due to exception", e);
System.exit(1);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java
index 4e3c0f5..ccf798a 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java
@@ -18,9 +18,6 @@
*/
package org.apache.hyracks.control.cc;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.api.client.HyracksClientInterfaceFunctions;
import org.apache.hyracks.api.comm.NetworkAddress;
import org.apache.hyracks.api.dataset.DatasetJobRecord.Status;
@@ -48,10 +45,13 @@
import org.apache.hyracks.ipc.api.IIPCHandle;
import org.apache.hyracks.ipc.api.IIPCI;
import org.apache.hyracks.ipc.exceptions.IPCException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
class ClientInterfaceIPCI implements IIPCI {
- private static final Logger LOGGER = Logger.getLogger(ClientInterfaceIPCI.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
private final JobIdFactory jobIdFactory;
private final DeployedJobSpecIdFactory deployedJobSpecIdFactory;
@@ -70,7 +70,7 @@
try {
handle.send(mid, ccs.getClusterControllerInfo(), null);
} catch (IPCException e) {
- LOGGER.log(Level.WARNING, "Error sending response to GET_CLUSTER_CONTROLLER_INFO request", e);
+ LOGGER.log(Level.WARN, "Error sending response to GET_CLUSTER_CONTROLLER_INFO request", e);
}
break;
case GET_JOB_STATUS:
@@ -146,7 +146,7 @@
try {
handle.send(mid, ccs.getCCContext().getClusterTopology(), null);
} catch (IPCException e) {
- LOGGER.log(Level.WARNING, "Error sending response to GET_CLUSTER_TOPOLOGY request", e);
+ LOGGER.log(Level.WARN, "Error sending response to GET_CLUSTER_TOPOLOGY request", e);
}
break;
case CLI_DEPLOY_BINARY:
@@ -184,7 +184,7 @@
try {
handle.send(mid, null, new IllegalArgumentException("Unknown function " + fn.getFunctionId()));
} catch (IPCException e) {
- LOGGER.log(Level.WARNING, "Error sending Unknown function response", e);
+ LOGGER.log(Level.WARN, "Error sending Unknown function response", e);
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java
index 5a53fce..ad0cb61 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java
@@ -19,7 +19,6 @@
package org.apache.hyracks.control.cc;
import java.util.Map;
-import java.util.logging.Logger;
import org.apache.hyracks.api.client.NodeControllerInfo;
import org.apache.hyracks.control.cc.work.ApplicationMessageWork;
@@ -48,9 +47,11 @@
import org.apache.hyracks.control.common.work.IResultCallback;
import org.apache.hyracks.ipc.api.IIPCHandle;
import org.apache.hyracks.ipc.api.IIPCI;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
class ClusterControllerIPCI implements IIPCI {
- private static final Logger LOGGER = Logger.getLogger(ClusterControllerIPCI.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
ClusterControllerIPCI(ClusterControllerService ccs) {
@@ -161,7 +162,7 @@
.schedule(new NotifyThreadDumpResponse(ccs, tdrf.getRequestId(), tdrf.getThreadDumpJSON()));
break;
default:
- LOGGER.warning("Unknown function: " + fn.getFunctionId());
+ LOGGER.warn("Unknown function: " + fn.getFunctionId());
}
}
}
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
index fd53c25..360975d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
@@ -36,8 +36,6 @@
import java.util.TreeMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.application.ICCApplication;
@@ -87,10 +85,13 @@
import org.apache.hyracks.ipc.impl.IPCSystem;
import org.apache.hyracks.ipc.impl.JavaSerializationBasedPayloadSerializerDeserializer;
import org.apache.hyracks.util.ExitUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.xml.sax.InputSource;
public class ClusterControllerService implements IControllerService {
- private static final Logger LOGGER = Logger.getLogger(ClusterControllerService.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final CCConfig ccConfig;
@@ -243,8 +244,8 @@
jobManager = (IJobManager) jobManagerConstructor.newInstance(ccConfig, this, jobCapacityController);
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException
| InvocationTargetException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "class " + ccConfig.getJobManagerClass() + " could not be used: ", e);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "class " + ccConfig.getJobManagerClass() + " could not be used: ", e);
}
// Falls back to the default implementation if the user-provided class name is not valid.
jobManager = new JobManager(ccConfig, this, jobCapacityController);
@@ -278,12 +279,12 @@
@Override
public void notifyNodeJoin(String nodeId, Map<IOption, Object> ncConfiguration) throws HyracksException {
// no-op, we don't care
- LOGGER.log(Level.WARNING, "Getting notified that node: " + nodeId + " has joined. and we don't care");
+ LOGGER.log(Level.WARN, "Getting notified that node: " + nodeId + " has joined. and we don't care");
}
@Override
public void notifyNodeFailure(Collection<String> deadNodeIds) throws HyracksException {
- LOGGER.log(Level.WARNING, "Getting notified that nodes: " + deadNodeIds + " has failed");
+ LOGGER.log(Level.WARN, "Getting notified that nodes: " + deadNodeIds + " has failed");
}
});
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
index 4928564..590a0f3 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
@@ -30,7 +30,6 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Logger;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.client.NodeControllerInfo;
@@ -49,9 +48,11 @@
import org.apache.hyracks.control.common.ipc.CCNCFunctions.AbortCCJobsFunction;
import org.apache.hyracks.ipc.api.IIPCHandle;
import org.apache.hyracks.ipc.exceptions.IPCException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class NodeManager implements INodeManager {
- private static final Logger LOGGER = Logger.getLogger(NodeManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
private final CCConfig ccConfig;
@@ -89,13 +90,13 @@
@Override
public void addNode(String nodeId, NodeControllerState ncState) throws HyracksException {
- LOGGER.warning("addNode(" + nodeId + ") called");
+ LOGGER.warn("addNode(" + nodeId + ") called");
if (nodeId == null || ncState == null) {
throw HyracksException.create(ErrorCode.INVALID_INPUT_PARAMETER);
}
// Updates the node registry.
if (nodeRegistry.containsKey(nodeId)) {
- LOGGER.warning(
+ LOGGER.warn(
"Node with name " + nodeId + " has already registered; failing the node then re-registering.");
removeDeadNode(nodeId);
} else {
@@ -106,7 +107,7 @@
throw HyracksDataException.create(e);
}
}
- LOGGER.warning("adding node to registry");
+ LOGGER.warn("adding node to registry");
nodeRegistry.put(nodeId, ncState);
// Updates the IP address to node names map.
try {
@@ -119,7 +120,7 @@
throw e;
}
// Updates the cluster capacity.
- LOGGER.warning("updating cluster capacity");
+ LOGGER.warn("updating cluster capacity");
resourceManager.update(nodeId, ncState.getCapacity());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/dataset/DatasetDirectoryService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/dataset/DatasetDirectoryService.java
index 1cb07d0..a57baf5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/dataset/DatasetDirectoryService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/dataset/DatasetDirectoryService.java
@@ -26,8 +26,6 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.NetworkAddress;
import org.apache.hyracks.api.dataset.DatasetDirectoryRecord;
@@ -44,6 +42,9 @@
import org.apache.hyracks.api.job.JobStatus;
import org.apache.hyracks.control.common.dataset.ResultStateSweeper;
import org.apache.hyracks.control.common.work.IResultCallback;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* TODO(madhusudancs): The potential perils of this global dataset directory service implementation is that, the jobs
@@ -54,7 +55,7 @@
*/
public class DatasetDirectoryService implements IDatasetDirectoryService {
- private static final Logger LOGGER = Logger.getLogger(DatasetDirectoryService.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final long resultTTL;
@@ -75,7 +76,7 @@
@Override
public synchronized void notifyJobCreation(JobId jobId, JobSpecification spec) throws HyracksException {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(getClass().getSimpleName() + " notified of new job " + jobId);
}
if (jobResultLocations.get(jobId) != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
index 04166a4..ea37cdd 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
@@ -27,8 +27,6 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression;
@@ -51,9 +49,11 @@
import org.apache.hyracks.control.cc.job.Task;
import org.apache.hyracks.control.cc.job.TaskCluster;
import org.apache.hyracks.control.cc.job.TaskClusterId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
class ActivityClusterPlanner {
- private static final Logger LOGGER = Logger.getLogger(ActivityClusterPlanner.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final JobExecutor executor;
@@ -74,7 +74,7 @@
TaskCluster[] taskClusters = computeTaskClusters(ac, jobRun, activityPlanMap);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Plan for " + ac);
LOGGER.info("Built " + taskClusters.length + " Task Clusters");
for (TaskCluster tc : taskClusters) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
index ab7a3db..ac06344 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
@@ -28,8 +28,6 @@
import java.util.PriorityQueue;
import java.util.Random;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.NetworkAddress;
import org.apache.hyracks.api.constraints.Constraint;
@@ -68,9 +66,12 @@
import org.apache.hyracks.control.common.job.TaskAttemptDescriptor;
import org.apache.hyracks.control.common.work.IResultCallback;
import org.apache.hyracks.control.common.work.NoOpCallback;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class JobExecutor {
- private static final Logger LOGGER = Logger.getLogger(JobExecutor.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
@@ -196,7 +197,7 @@
Set<TaskCluster> taskClusterRoots = new HashSet<>();
findRunnableTaskClusterRoots(taskClusterRoots,
jobRun.getActivityClusterGraph().getActivityClusterMap().values());
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO,
"Runnable TC roots: " + taskClusterRoots + ", inProgressTaskClusters: " + inProgressTaskClusters);
}
@@ -226,19 +227,19 @@
queue.add(new RankedRunnableTaskCluster(priority, tc));
}
}
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Ranked TCs: " + queue);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Ranked TCs: " + queue);
}
Map<String, List<TaskAttemptDescriptor>> taskAttemptMap = new HashMap<>();
for (RankedRunnableTaskCluster rrtc : queue) {
TaskCluster tc = rrtc.getTaskCluster();
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Found runnable TC: " + tc);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Found runnable TC: " + tc);
List<TaskClusterAttempt> attempts = tc.getAttempts();
- LOGGER.fine("Attempts so far:" + attempts.size());
+ LOGGER.debug("Attempts so far:" + attempts.size());
for (TaskClusterAttempt tcAttempt : attempts) {
- LOGGER.fine("Status: " + tcAttempt.getStatus());
+ LOGGER.debug("Status: " + tcAttempt.getStatus());
}
}
assignTaskLocations(tc, taskAttemptMap);
@@ -258,16 +259,16 @@
* Runnability(Non-schedulable TaskCluster) = {NOT_RUNNABLE, _}
*/
private Runnability assignRunnabilityRank(TaskCluster goal, Map<TaskCluster, Runnability> runnabilityMap) {
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Computing runnability: " + goal);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Computing runnability: " + goal);
}
if (runnabilityMap.containsKey(goal)) {
return runnabilityMap.get(goal);
}
TaskClusterAttempt lastAttempt = findLastTaskClusterAttempt(goal);
if (lastAttempt != null) {
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Last Attempt Status: " + lastAttempt.getStatus());
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Last Attempt Status: " + lastAttempt.getStatus());
}
if (lastAttempt.getStatus() == TaskClusterAttempt.TaskClusterStatus.COMPLETED) {
Runnability runnability = new Runnability(Runnability.Tag.COMPLETED, Integer.MIN_VALUE);
@@ -284,15 +285,15 @@
PartitionMatchMaker pmm = jobRun.getPartitionMatchMaker();
Runnability aggregateRunnability = new Runnability(Runnability.Tag.RUNNABLE, 0);
for (PartitionId pid : goal.getRequiredPartitions()) {
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Inspecting required partition: " + pid);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Inspecting required partition: " + pid);
}
Runnability runnability;
ConnectorDescriptorId cdId = pid.getConnectorDescriptorId();
IConnectorPolicy cPolicy = connectorPolicyMap.get(cdId);
PartitionState maxState = pmm.getMaximumAvailableState(pid);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Policy: " + cPolicy + " maxState: " + maxState);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Policy: " + cPolicy + " maxState: " + maxState);
}
if (PartitionState.COMMITTED.equals(maxState)) {
runnability = new Runnability(Runnability.Tag.RUNNABLE, 0);
@@ -328,8 +329,8 @@
// already not runnable -- cannot get better. bail.
break;
}
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("aggregateRunnability: " + aggregateRunnability);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("aggregateRunnability: " + aggregateRunnability);
}
}
runnabilityMap.put(goal, aggregateRunnability);
@@ -511,8 +512,8 @@
if (node != null) {
node.getActiveJobIds().add(jobRun.getJobId());
boolean changed = jobRun.getParticipatingNodeIds().add(nodeId);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Starting: " + taskDescriptors + " at " + entry.getKey());
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Starting: " + taskDescriptors + " at " + entry.getKey());
}
byte[] jagBytes = changed ? acgBytes : null;
node.getNodeController().startTasks(deploymentId, jobId, jagBytes, taskDescriptors,
@@ -537,14 +538,14 @@
private void abortTaskCluster(TaskClusterAttempt tcAttempt,
TaskClusterAttempt.TaskClusterStatus failedOrAbortedStatus) {
- LOGGER.fine("Aborting task cluster: " + tcAttempt.getAttempt());
+ LOGGER.debug("Aborting task cluster: " + tcAttempt.getAttempt());
Set<TaskAttemptId> abortTaskIds = new HashSet<>();
Map<String, List<TaskAttemptId>> abortTaskAttemptMap = new HashMap<>();
for (TaskAttempt ta : tcAttempt.getTaskAttempts().values()) {
TaskAttemptId taId = ta.getTaskAttemptId();
TaskAttempt.TaskStatus status = ta.getStatus();
abortTaskIds.add(taId);
- LOGGER.fine("Checking " + taId + ": " + ta.getStatus());
+ LOGGER.debug("Checking " + taId + ": " + ta.getStatus());
if (status == TaskAttempt.TaskStatus.RUNNING || status == TaskAttempt.TaskStatus.COMPLETED) {
ta.setStatus(TaskAttempt.TaskStatus.ABORTED, null);
ta.setEndTime(System.currentTimeMillis());
@@ -564,13 +565,13 @@
abortTaskAttemptMap.forEach((key, abortTaskAttempts) -> {
final NodeControllerState node = nodeManager.getNodeControllerState(key);
if (node != null) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Aborting: " + abortTaskAttempts + " at " + key);
}
try {
node.getNodeController().abortTasks(jobId, abortTaskAttempts);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ LOGGER.log(Level.ERROR, e.getMessage(), e);
}
}
});
@@ -648,13 +649,13 @@
TaskCluster tc = ta.getTask().getTaskCluster();
TaskClusterAttempt lastAttempt = findLastTaskClusterAttempt(tc);
if (lastAttempt == null || taId.getAttempt() != lastAttempt.getAttempt()) {
- LOGGER.warning(() -> "Ignoring task complete notification: " + taId + " -- Current last attempt = "
+ LOGGER.warn(() -> "Ignoring task complete notification: " + taId + " -- Current last attempt = "
+ lastAttempt);
return;
}
TaskAttempt.TaskStatus taStatus = ta.getStatus();
if (taStatus != TaskAttempt.TaskStatus.RUNNING) {
- LOGGER.warning(() -> "Spurious task complete notification: " + taId + " Current state = " + taStatus);
+ LOGGER.warn(() -> "Spurious task complete notification: " + taId + " Current state = " + taStatus);
return;
}
ta.setStatus(TaskAttempt.TaskStatus.COMPLETED, null);
@@ -666,7 +667,7 @@
startRunnableActivityClusters();
}
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, e, () -> "Unexpected failure. Aborting job " + jobRun.getJobId());
+ LOGGER.error(() -> "Unexpected failure. Aborting job " + jobRun.getJobId(), e);
abortJob(Collections.singletonList(e), NoOpCallback.INSTANCE);
}
}
@@ -701,7 +702,7 @@
LOGGER.log(Level.INFO, "We will try to start runnable activity clusters of " + ta.getTaskAttemptId());
startRunnableActivityClusters();
} else {
- LOGGER.warning(
+ LOGGER.warn(
"Ignoring task failure notification: " + taId + " -- Current last attempt = " + lastAttempt);
}
} catch (Exception e) {
@@ -729,7 +730,7 @@
ta -> HyracksException.create(ErrorCode.NODE_FAILED, ta.getNodeId()));
startRunnableActivityClusters();
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, e, () -> "Unexpected failure. Aborting job " + jobRun.getJobId());
+ LOGGER.error(() -> "Unexpected failure. Aborting job " + jobRun.getJobId(), e);
abortJob(Collections.singletonList(e), NoOpCallback.INSTANCE);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java
index 7f1100b..ffb72c9 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java
@@ -29,8 +29,6 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksException;
@@ -48,6 +46,9 @@
import org.apache.hyracks.control.common.controllers.CCConfig;
import org.apache.hyracks.control.common.work.IResultCallback;
import org.apache.hyracks.control.common.work.NoOpCallback;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -55,7 +56,7 @@
// Job manager manages all jobs that haven been submitted to the cluster.
public class JobManager implements IJobManager {
- private static final Logger LOGGER = Logger.getLogger(JobManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
private final Map<JobId, JobRun> activeRunMap;
@@ -74,8 +75,8 @@
jobQueue = (IJobQueue) jobQueueConstructor.newInstance(this, this.jobCapacityController);
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException
| InvocationTargetException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "class " + ccConfig.getJobQueueClass() + " could not be used: ", e);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "class " + ccConfig.getJobQueueClass() + " could not be used: ", e);
}
// Falls back to the default implementation if the user-provided class name is not valid.
jobQueue = new FIFOJobQueue(this, jobCapacityController);
@@ -158,7 +159,7 @@
return;
}
if (run.getPendingStatus() != null) {
- LOGGER.warning("Ignoring duplicate cleanup for JobRun with id: " + jobId);
+ LOGGER.warn("Ignoring duplicate cleanup for JobRun with id: " + jobId);
return;
}
Set<String> targetNodes = run.getParticipatingNodeIds();
@@ -179,7 +180,7 @@
ncs.getNodeController().cleanUpJoblet(jobId, status);
}
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ LOGGER.log(Level.ERROR, e.getMessage(), e);
if (caughtException == null) {
caughtException = HyracksException.create(e);
} else {
@@ -212,7 +213,7 @@
try {
serviceCtx.notifyJobFinish(jobId, run.getPendingStatus(), run.getPendingExceptions());
} catch (HyracksException e) {
- LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ LOGGER.log(Level.ERROR, e.getMessage(), e);
caughtException = e;
}
}
@@ -229,7 +230,7 @@
try {
ccs.getJobLogFile().log(createJobLogObject(run));
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ LOGGER.log(Level.ERROR, e.getMessage(), e);
if (caughtException == null) {
caughtException = new HyracksException(e);
} else {
@@ -320,7 +321,7 @@
try {
run.getExecutor().startJob();
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Aborting " + run.getJobId() + " due to failure during job start", e);
+ LOGGER.log(Level.ERROR, "Aborting " + run.getJobId() + " due to failure during job start", e);
final List<Exception> exceptions = Collections.singletonList(e);
// fail the job then abort it
run.setStatus(JobStatus.FAILURE, exceptions);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java
index 3a5e3be..c5e51a6 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java
@@ -25,7 +25,6 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.logging.Logger;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.dataflow.TaskAttemptId;
@@ -33,9 +32,11 @@
import org.apache.hyracks.control.common.job.PartitionDescriptor;
import org.apache.hyracks.control.common.job.PartitionRequest;
import org.apache.hyracks.control.common.job.PartitionState;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class PartitionMatchMaker {
- private static final Logger LOGGER = Logger.getLogger(PartitionMatchMaker.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final Map<PartitionId, List<PartitionDescriptor>> partitionDescriptors;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/scheduler/FIFOJobQueue.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/scheduler/FIFOJobQueue.java
index da13091..2a6bdae 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/scheduler/FIFOJobQueue.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/scheduler/FIFOJobQueue.java
@@ -26,8 +26,6 @@
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksException;
@@ -39,6 +37,9 @@
import org.apache.hyracks.control.cc.job.JobRun;
import org.apache.hyracks.util.annotations.NotThreadSafe;
import org.apache.hyracks.util.annotations.ThreadSafetyGuaranteedBy;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* An implementation of IJobQueue that gives more priority to jobs that are submitted earlier.
@@ -47,7 +48,7 @@
@ThreadSafetyGuaranteedBy("JobManager")
public class FIFOJobQueue implements IJobQueue {
- private static final Logger LOGGER = Logger.getLogger(FIFOJobQueue.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final Map<JobId, JobRun> jobListMap = new LinkedHashMap<>();
private final IJobManager jobManager;
@@ -104,7 +105,7 @@
// Fails the job.
jobManager.prepareComplete(run, JobStatus.FAILURE_BEFORE_EXECUTION, exceptions);
} catch (HyracksException e) {
- LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ LOGGER.log(Level.ERROR, e.getMessage(), e);
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/ApplicationInstallationHandler.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/ApplicationInstallationHandler.java
index 2b0382b..f400978 100755
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/ApplicationInstallationHandler.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/ApplicationInstallationHandler.java
@@ -24,8 +24,6 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
@@ -35,6 +33,9 @@
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http.HttpMethod;
@@ -42,7 +43,7 @@
public class ApplicationInstallationHandler extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(ApplicationInstallationHandler.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private ClusterControllerService ccs;
@@ -81,7 +82,7 @@
response.setStatus(HttpResponseStatus.METHOD_NOT_ALLOWED);
}
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Unhandled exception ", e);
+ LOGGER.log(Level.WARN, "Unhandled exception ", e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestHandler.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestHandler.java
index b39915f..479004d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestHandler.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestHandler.java
@@ -20,13 +20,14 @@
import java.io.IOException;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -35,7 +36,7 @@
public class JSONOutputRequestHandler extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(JSONOutputRequestHandler.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final IJSONOutputFunction fn;
public JSONOutputRequestHandler(ConcurrentMap<String, Object> ctx, String[] paths, IJSONOutputFunction fn) {
@@ -63,7 +64,7 @@
try {
return fn.invoke(host, servletPath, parts);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Exception invoking " + fn.getClass().getName(), e);
+ LOGGER.log(Level.WARN, "Exception invoking " + fn.getClass().getName(), e);
response.setStatus(HttpResponseStatus.BAD_REQUEST);
response.writer().print(e.getMessage());
}
@@ -77,7 +78,7 @@
om.writer().writeValue(response.writer(), result);
response.setStatus(HttpResponseStatus.OK);
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Exception delivering result in " + getClass().getName(), e);
+ LOGGER.log(Level.WARN, "Exception delivering result in " + getClass().getName(), e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
response.writer().print(e.getMessage());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ApplicationMessageWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ApplicationMessageWork.java
index 341834c..392046d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ApplicationMessageWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ApplicationMessageWork.java
@@ -18,21 +18,21 @@
*/
package org.apache.hyracks.control.cc.work;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.api.application.ICCServiceContext;
import org.apache.hyracks.api.deployment.DeploymentId;
import org.apache.hyracks.api.messages.IMessage;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.common.deployment.DeploymentUtils;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* @author rico
*/
public class ApplicationMessageWork extends AbstractHeartbeatWork {
- private static final Logger LOGGER = Logger.getLogger(ApplicationMessageWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private byte[] message;
private DeploymentId deploymentId;
private String nodeId;
@@ -63,7 +63,7 @@
}
});
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Error in stats reporting", e);
+ LOGGER.log(Level.WARN, "Error in stats reporting", e);
throw new RuntimeException(e);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java
index 613efad..b44c58c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java
@@ -20,8 +20,6 @@
package org.apache.hyracks.control.cc.work;
import java.util.Collection;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.NodeControllerState;
@@ -31,9 +29,12 @@
import org.apache.hyracks.control.common.work.SynchronizableWork;
import org.apache.hyracks.ipc.exceptions.IPCException;
import org.apache.hyracks.util.ExitUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ClusterShutdownWork extends SynchronizableWork {
- private static final Logger LOGGER = Logger.getLogger(ClusterShutdownWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
private final boolean terminateNCService;
@@ -76,7 +77,7 @@
/*
* best effort - just exit, user will have to kill misbehaving NCs
*/
- LOGGER.severe("Clean shutdown of NCs timed out- giving up; unresponsive nodes: " +
+ LOGGER.error("Clean shutdown of NCs timed out- giving up; unresponsive nodes: " +
shutdownStatus.getRemainingNodes());
}
callback.setValue(cleanShutdown);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
index 407f9cd..e1b59e1 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
@@ -22,8 +22,6 @@
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.NodeControllerState;
@@ -31,9 +29,12 @@
import org.apache.hyracks.util.ThreadDumpUtil;
import org.apache.hyracks.control.common.work.AbstractWork;
import org.apache.hyracks.control.common.work.IResultCallback;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class GetThreadDumpWork extends AbstractWork {
- private static final Logger LOGGER = Logger.getLogger(GetThreadDumpWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final int TIMEOUT_SECS = 60;
private final ClusterControllerService ccs;
@@ -56,7 +57,7 @@
try {
callback.setValue(ThreadDumpUtil.takeDumpJSONString());
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Exception taking CC thread dump", e);
+ LOGGER.log(Level.WARN, "Exception taking CC thread dump", e);
callback.setException(e);
}
} else {
@@ -82,7 +83,7 @@
Thread.sleep(sleepTime);
}
if (ccs.removeThreadDumpRun(run.getRequestId()) != null) {
- LOGGER.log(Level.WARNING, "Timed out thread dump request " + run.getRequestId()
+ LOGGER.log(Level.WARN, "Timed out thread dump request " + run.getRequestId()
+ " for node " + nodeId);
callback.setException(new TimeoutException("Thread dump request for node " + nodeId
+ " timed out after " + TIMEOUT_SECS + " seconds."));
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobCleanupWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobCleanupWork.java
index bb85c13..f847cdb 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobCleanupWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobCleanupWork.java
@@ -20,8 +20,6 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.api.job.JobId;
@@ -30,9 +28,11 @@
import org.apache.hyracks.control.cc.job.JobRun;
import org.apache.hyracks.control.common.work.AbstractWork;
import org.apache.hyracks.control.common.work.IResultCallback;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class JobCleanupWork extends AbstractWork {
- private static final Logger LOGGER = Logger.getLogger(JobCleanupWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private IJobManager jobManager;
private JobId jobId;
@@ -51,7 +51,7 @@
@Override
public void run() {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Cleanup for JobRun with id: " + jobId);
}
try {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java
index 337e88f..b3b33c9 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java
@@ -21,8 +21,6 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.api.job.JobId;
@@ -32,9 +30,12 @@
import org.apache.hyracks.control.cc.cluster.INodeManager;
import org.apache.hyracks.control.cc.job.IJobManager;
import org.apache.hyracks.control.cc.job.JobRun;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class JobletCleanupNotificationWork extends AbstractHeartbeatWork {
- private static final Logger LOGGER = Logger.getLogger(JobletCleanupNotificationWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private ClusterControllerService ccs;
private JobId jobId;
@@ -52,12 +53,12 @@
IJobManager jobManager = ccs.getJobManager();
final JobRun run = jobManager.get(jobId);
if (run == null) {
- LOGGER.log(Level.WARNING, () -> "ignoring unknown job " + jobId + " on notification from " + nodeId);
+ LOGGER.log(Level.WARN, () -> "ignoring unknown job " + jobId + " on notification from " + nodeId);
return;
}
Set<String> cleanupPendingNodes = run.getCleanupPendingNodeIds();
if (!cleanupPendingNodes.remove(nodeId)) {
- LOGGER.log(Level.WARNING, () -> nodeId + " not in pending cleanup nodes set: " + cleanupPendingNodes +
+ LOGGER.log(Level.WARN, () -> nodeId + " not in pending cleanup nodes set: " + cleanupPendingNodes +
" for job " + jobId);
return;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NodeHeartbeatWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NodeHeartbeatWork.java
index 120f415..5c98035 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NodeHeartbeatWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NodeHeartbeatWork.java
@@ -18,10 +18,9 @@
*/
package org.apache.hyracks.control.cc.work;
-import java.util.logging.Level;
-
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.common.heartbeat.HeartbeatData;
+import org.apache.logging.log4j.Level;
public class NodeHeartbeatWork extends AbstractHeartbeatWork {
@@ -36,6 +35,6 @@
@Override
public Level logLevel() {
- return Level.FINEST;
+ return Level.TRACE;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyShutdownWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyShutdownWork.java
index 83cbb91..d9f37fa 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyShutdownWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyShutdownWork.java
@@ -19,15 +19,15 @@
package org.apache.hyracks.control.cc.work;
-import java.util.logging.Logger;
-
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.common.shutdown.ShutdownRun;
import org.apache.hyracks.control.common.work.SynchronizableWork;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class NotifyShutdownWork extends SynchronizableWork {
- private static final Logger LOGGER = Logger.getLogger(NotifyShutdownWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
private final String nodeId;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyThreadDumpResponse.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyThreadDumpResponse.java
index 2dae4b0..1cf443e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyThreadDumpResponse.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyThreadDumpResponse.java
@@ -18,13 +18,13 @@
*/
package org.apache.hyracks.control.cc.work;
-import java.util.logging.Logger;
-
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.common.work.AbstractWork;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class NotifyThreadDumpResponse extends AbstractWork {
- private static final Logger LOGGER = Logger.getLogger(NotifyThreadDumpResponse.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
@@ -39,10 +39,10 @@
@Override
public void run() {
- LOGGER.fine("Delivering thread dump response: " + requestId);
+ LOGGER.debug("Delivering thread dump response: " + requestId);
final GetThreadDumpWork.ThreadDumpRun threadDumpRun = ccs.removeThreadDumpRun(requestId);
if (threadDumpRun == null) {
- LOGGER.warning("Thread dump run " + requestId + " not found; discarding reply: " + threadDumpJSON);
+ LOGGER.warn("Thread dump run " + requestId + " not found; discarding reply: " + threadDumpJSON);
} else {
threadDumpRun.notifyThreadDumpReceived(threadDumpJSON);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
index d1d2208..07b0f04 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
@@ -20,8 +20,6 @@
import java.util.HashMap;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.config.IApplicationConfig;
import org.apache.hyracks.api.config.IOption;
@@ -34,9 +32,12 @@
import org.apache.hyracks.control.common.ipc.NodeControllerRemoteProxy;
import org.apache.hyracks.control.common.work.SynchronizableWork;
import org.apache.hyracks.ipc.api.IIPCHandle;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class RegisterNodeWork extends SynchronizableWork {
- private static final Logger LOGGER = Logger.getLogger(RegisterNodeWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
private final NodeRegistration reg;
@@ -53,7 +54,7 @@
CCNCFunctions.NodeRegistrationResult result;
Map<IOption, Object> ncConfiguration = new HashMap<>();
try {
- LOGGER.log(Level.WARNING, "Registering INodeController: id = " + id);
+ LOGGER.log(Level.WARN, "Registering INodeController: id = " + id);
NodeControllerRemoteProxy nc =
new NodeControllerRemoteProxy(ccs.getClusterIPC(), reg.getNodeControllerAddress());
NodeControllerState state = new NodeControllerState(nc, reg);
@@ -72,12 +73,12 @@
result = new CCNCFunctions.NodeRegistrationResult(params, null);
ccs.getJobIdFactory().ensureMinimumId(reg.getMaxJobId() + 1);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Node registration failed", e);
+ LOGGER.log(Level.WARN, "Node registration failed", e);
result = new CCNCFunctions.NodeRegistrationResult(null, e);
}
- LOGGER.warning("sending registration response to node");
+ LOGGER.warn("sending registration response to node");
ncIPCHandle.send(-1, result, null);
- LOGGER.warning("notifying node join");
+ LOGGER.warn("notifying node join");
ccs.getContext().notifyNodeJoin(id, ncConfiguration);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterResultPartitionLocationWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterResultPartitionLocationWork.java
index 7117b6f..ad36701 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterResultPartitionLocationWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterResultPartitionLocationWork.java
@@ -20,8 +20,6 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.NetworkAddress;
import org.apache.hyracks.api.dataset.ResultSetId;
@@ -31,10 +29,13 @@
import org.apache.hyracks.control.cc.job.JobRun;
import org.apache.hyracks.control.common.work.AbstractWork;
import org.apache.hyracks.control.common.work.NoOpCallback;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class RegisterResultPartitionLocationWork extends AbstractWork {
- private static final Logger LOGGER = Logger.getLogger(RegisterResultPartitionLocationWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
@@ -70,7 +71,7 @@
ccs.getDatasetDirectoryService().registerResultPartitionLocation(jobId, rsId, orderedResult, emptyResult,
partition, nPartitions, networkAddress);
} catch (HyracksDataException e) {
- LOGGER.log(Level.WARNING, "Failed to register partition location", e);
+ LOGGER.log(Level.WARN, "Failed to register partition location", e);
// Should fail the job if exists on cc, otherwise, do nothing
JobRun jobRun = ccs.getJobManager().get(jobId);
if (jobRun != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RemoveDeadNodesWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RemoveDeadNodesWork.java
index a162708..ee10669 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RemoveDeadNodesWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RemoveDeadNodesWork.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.control.cc.work;
import java.util.Collection;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.exceptions.HyracksException;
@@ -30,9 +28,12 @@
import org.apache.hyracks.control.cc.job.IJobManager;
import org.apache.hyracks.control.cc.job.JobRun;
import org.apache.hyracks.control.common.work.AbstractWork;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class RemoveDeadNodesWork extends AbstractWork {
- private static Logger LOGGER = Logger.getLogger(RemoveDeadNodesWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
@@ -49,7 +50,7 @@
Collection<JobId> affectedJobIds = result.getRight();
int size = affectedJobIds.size();
if (size > 0) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Number of affected jobs: " + size);
}
IJobManager jobManager = ccs.getJobManager();
@@ -64,12 +65,12 @@
ccs.getContext().notifyNodeFailure(deadNodes);
}
} catch (HyracksException e) {
- LOGGER.log(Level.WARNING, "Uncaught exception on notifyNodeFailure", e);
+ LOGGER.log(Level.WARN, "Uncaught exception on notifyNodeFailure", e);
}
}
@Override
public Level logLevel() {
- return Level.FINE;
+ return Level.DEBUG;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ReportProfilesWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ReportProfilesWork.java
index 02806a0..e5e6f43 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ReportProfilesWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ReportProfilesWork.java
@@ -20,12 +20,12 @@
package org.apache.hyracks.control.cc.work;
import java.util.List;
-import java.util.logging.Level;
import org.apache.hyracks.control.cc.job.IJobManager;
import org.apache.hyracks.control.cc.job.JobRun;
import org.apache.hyracks.control.common.job.profiling.om.JobProfile;
import org.apache.hyracks.control.common.work.AbstractWork;
+import org.apache.logging.log4j.Level;
public class ReportProfilesWork extends AbstractWork {
private final IJobManager jobManager;
@@ -49,6 +49,6 @@
@Override
public Level logLevel() {
- return Level.FINEST;
+ return Level.TRACE;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java
index dfc22b1..aef331f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java
@@ -23,11 +23,12 @@
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.net.Socket;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.control.common.controllers.ServiceConstants.ServiceCommand;
import org.apache.hyracks.control.common.work.SynchronizableWork;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* A work which is run at CC shutdown for each NC specified in the configuration file.
@@ -35,7 +36,7 @@
*/
public class ShutdownNCServiceWork extends SynchronizableWork {
- private static final Logger LOGGER = Logger.getLogger(ShutdownNCServiceWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final String ncHost;
private final int ncPort;
@@ -55,7 +56,7 @@
oos.writeUTF(ServiceCommand.TERMINATE.name());
oos.close();
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Failed to contact NC service '" + ncId + "' at " + ncHost + ":" + ncPort, e);
+ LOGGER.log(Level.WARN, "Failed to contact NC service '" + ncId + "' at " + ncHost + ":" + ncPort, e);
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TaskFailureWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TaskFailureWork.java
index 8f50087..a2be15c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TaskFailureWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TaskFailureWork.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.control.cc.work;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.TaskAttemptId;
import org.apache.hyracks.api.job.JobId;
@@ -28,9 +26,12 @@
import org.apache.hyracks.control.cc.job.IJobManager;
import org.apache.hyracks.control.cc.job.JobRun;
import org.apache.hyracks.control.cc.job.TaskAttempt;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class TaskFailureWork extends AbstractTaskLifecycleWork {
- private static final Logger LOGGER = Logger.getLogger(TaskFailureWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final List<Exception> exceptions;
public TaskFailureWork(ClusterControllerService ccs, JobId jobId, TaskAttemptId taId, String nodeId,
@@ -41,7 +42,7 @@
@Override
protected void performEvent(TaskAttempt ta) {
- LOGGER.log(Level.WARNING, "Executing task failure work for " + this, exceptions.get(0));
+ LOGGER.log(Level.WARN, "Executing task failure work for " + this, exceptions.get(0));
IJobManager jobManager = ccs.getJobManager();
JobRun run = jobManager.get(jobId);
ccs.getDatasetDirectoryService().reportJobFailure(jobId, exceptions);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
index ab526e8..2f80f5b 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
@@ -24,14 +24,15 @@
import java.io.ObjectOutputStream;
import java.io.StringWriter;
import java.net.Socket;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.config.Section;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.common.controllers.NCConfig;
import org.apache.hyracks.control.common.controllers.ServiceConstants.ServiceCommand;
import org.apache.hyracks.control.common.work.AbstractWork;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.ini4j.Ini;
/**
@@ -40,7 +41,7 @@
*/
public class TriggerNCWork extends AbstractWork {
- private static final Logger LOGGER = Logger.getLogger(TriggerNCWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ClusterControllerService ccs;
private final String ncHost;
@@ -67,7 +68,7 @@
return;
// QQQ Should probably have an ACK here
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Failed to contact NC service at " + ncHost + ":" + ncPort
+ LOGGER.log(Level.WARN, "Failed to contact NC service at " + ncHost + ":" + ncPort
+ "; will retry", e);
}
try {
@@ -93,8 +94,8 @@
// entry point so that NCs can determine where all their config is.
ccini.put(Section.LOCALNC.sectionName(), NCConfig.Option.NODE_ID.ini(), ncId);
ccini.store(iniString);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Returning Ini file:\n" + iniString.toString());
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Returning Ini file:\n" + iniString.toString());
}
return iniString.toString();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
index e020ef2..0f12936 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
@@ -79,5 +79,9 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
index 142afbf..7d3ada5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
@@ -40,8 +40,6 @@
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -54,6 +52,9 @@
import org.apache.hyracks.api.config.Section;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.control.common.application.ConfigManagerApplicationConfig;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.ini4j.Ini;
import org.ini4j.Profile;
import org.kohsuke.args4j.CmdLineException;
@@ -64,7 +65,7 @@
public class ConfigManager implements IConfigManager, Serializable {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(ConfigManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private HashSet<IOption> registeredOptions = new HashSet<>();
private HashMap<IOption, Object> definedMap = new HashMap<>();
@@ -143,7 +144,7 @@
if (configured) {
throw new IllegalStateException("configuration already processed");
}
- LOGGER.fine("registering option: " + option.toIniString());
+ LOGGER.debug("registering option: " + option.toIniString());
Map<String, IOption> optionMap = sectionMap.computeIfAbsent(option.section(), section -> new HashMap<>());
IOption prev = optionMap.put(option.ini(), option);
if (prev != null) {
@@ -157,9 +158,9 @@
(node, value,
isDefault) -> correctedMap(option.section() == Section.NC ? node : null, isDefault)
.put(option, value));
- if (LOGGER.isLoggable(Level.FINE)) {
+ if (LOGGER.isDebugEnabled()) {
optionSetters.put(option, (node, value, isDefault) -> LOGGER
- .fine((isDefault ? "defaulting" : "setting ") + option.toIniString() + " to " + value));
+ .debug((isDefault ? "defaulting" : "setting ") + option.toIniString() + " to " + value));
}
}
}
@@ -171,12 +172,12 @@
}
public void ensureNode(String nodeId) {
- LOGGER.fine("ensureNode: " + nodeId);
+ LOGGER.debug("ensureNode: " + nodeId);
nodeSpecificMap.computeIfAbsent(nodeId, this::createNodeSpecificMap);
}
private Map<IOption, Object> createNodeSpecificMap(String nodeId) {
- LOGGER.fine("createNodeSpecificMap: " + nodeId);
+ LOGGER.debug("createNodeSpecificMap: " + nodeId);
return Collections.synchronizedMap(new HashMap<>());
}
@@ -259,7 +260,7 @@
cmdLineParser.addArgument(new Args4jSetter(o -> appArgs.add(String.valueOf(o)), true, String.class),
new Args4jArgument());
}
- LOGGER.fine("parsing cmdline: " + Arrays.toString(args));
+ LOGGER.debug("parsing cmdline: " + Arrays.toString(args));
if (args == null || args.length == 0) {
LOGGER.info("no command line args supplied");
return appArgs;
@@ -271,7 +272,7 @@
ConfigUtils.printUsage(e, usageFilter, System.err);
throw e;
} else {
- LOGGER.log(Level.FINE, "Ignoring parse exception due to -help", e);
+ LOGGER.log(Level.DEBUG, "Ignoring parse exception due to -help", e);
}
}
if (bean.help) {
@@ -325,7 +326,7 @@
return;
}
final String value = iniOption.getValue();
- LOGGER.fine("setting " + option.toIniString() + " to " + value);
+ LOGGER.debug("setting " + option.toIniString() + " to " + value);
final Object parsed = option.type().parse(value);
invokeSetters(option, parsed, node);
}
@@ -348,7 +349,7 @@
}
private void applyDefaults() {
- LOGGER.fine("applying defaults");
+ LOGGER.debug("applying defaults");
sectionMap.forEach((key, value) -> {
if (key == Section.NC) {
value.values().forEach(option -> getNodeNames()
@@ -487,7 +488,7 @@
throw new IllegalStateException("Option not registered with ConfigManager: " + option.toIniString() + "("
+ option.getClass() + "." + option + ")");
} else if (option.section() == Section.NC) {
- LOGGER.warning("NC option " + option.toIniString() + " being accessed outside of NC-scoped configuration.");
+ LOGGER.warn("NC option " + option.toIniString() + " being accessed outside of NC-scoped configuration.");
}
return getOrDefault(configurationMap, option, null);
}
@@ -511,7 +512,7 @@
if (description != null && !"".equals(description)) {
usage.append(description).append(" ");
} else {
- LOGGER.warning("missing description for option: "
+ LOGGER.warn("missing description for option: "
+ option.getClass().getName().substring(option.getClass().getName().lastIndexOf(".") + 1) + "."
+ option.name());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
index 1e92a7a..62e6ee0 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
@@ -19,11 +19,11 @@
package org.apache.hyracks.control.common.config;
import java.net.MalformedURLException;
-import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.api.config.IOptionType;
import org.apache.hyracks.util.StorageUtil;
+import org.apache.logging.log4j.Level;
import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -169,7 +169,14 @@
public static final IOptionType<Level> LEVEL = new IOptionType<Level>() {
@Override
public Level parse(String s) {
- return s == null ? null : Level.parse(s);
+ if (s == null) {
+ throw new IllegalArgumentException("Logging level cannot be null");
+ }
+ final Level level = Level.getLevel(s);
+ if (level == null) {
+ throw new IllegalArgumentException("Unrecognized logging level: " + s);
+ }
+ return level;
}
@Override
@@ -179,12 +186,12 @@
@Override
public String serializeToJSON(Object value) {
- return value == null ? null : ((Level)value).getName();
+ return value == null ? null : ((Level)value).name();
}
@Override
public String serializeToIni(Object value) {
- return ((Level)value).getName();
+ return ((Level)value).name();
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/dataset/ResultStateSweeper.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/dataset/ResultStateSweeper.java
index da1714b..a9ca771 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/dataset/ResultStateSweeper.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/dataset/ResultStateSweeper.java
@@ -21,11 +21,11 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataset.IDatasetManager;
import org.apache.hyracks.api.job.JobId;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Logger;
/**
* Sweeper to clean up the stale result distribution files and result states.
@@ -59,7 +59,7 @@
Thread.sleep(resultSweepThreshold);
sweep();
} catch (InterruptedException e) {
- logger.log(Level.WARNING, "Result cleaner thread interrupted, shutting down.");
+ logger.log(Level.WARN, "Result cleaner thread interrupted, shutting down.");
break; // the interrupt was explicit from another thread. This thread should shut down...
}
}
@@ -78,8 +78,8 @@
datasetManager.deinitState(jobId);
}
}
- if (logger.isLoggable(Level.FINER)) {
- logger.finer("Result state cleanup instance successfully completed.");
+ if (logger.isTraceEnabled()) {
+ logger.trace("Result state cleanup instance successfully completed.");
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/CCNCFunctions.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/CCNCFunctions.java
index 77c352e..dca8c07 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/CCNCFunctions.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/CCNCFunctions.java
@@ -35,8 +35,6 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.client.NodeControllerInfo;
import org.apache.hyracks.api.comm.NetworkAddress;
@@ -66,9 +64,12 @@
import org.apache.hyracks.control.common.job.profiling.om.TaskProfile;
import org.apache.hyracks.ipc.api.IPayloadSerializerDeserializer;
import org.apache.hyracks.ipc.impl.JavaSerializationBasedPayloadSerializerDeserializer;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class CCNCFunctions {
- private static final Logger LOGGER = Logger.getLogger(CCNCFunctions.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int FID_CODE_SIZE = 1;
@@ -1350,7 +1351,7 @@
try {
serialize(baos, object, fid);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Error serializing " + object, e);
+ LOGGER.log(Level.ERROR, "Error serializing " + object, e);
throw e;
}
baos.close();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
index f9af4c63..447d678 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
@@ -20,7 +20,6 @@
import java.net.InetSocketAddress;
import java.util.List;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.NetworkAddress;
import org.apache.hyracks.api.dataflow.TaskAttemptId;
@@ -55,9 +54,11 @@
import org.apache.hyracks.control.common.job.profiling.om.JobProfile;
import org.apache.hyracks.control.common.job.profiling.om.TaskProfile;
import org.apache.hyracks.ipc.impl.IPCSystem;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ClusterControllerRemoteProxy extends ControllerRemoteProxy implements IClusterController {
- private static final Logger LOGGER = Logger.getLogger(ClusterControllerRemoteProxy.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final int clusterConnectRetries;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ControllerRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ControllerRemoteProxy.java
index 83972d5..fe9e85a 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ControllerRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ControllerRemoteProxy.java
@@ -19,12 +19,12 @@
package org.apache.hyracks.control.common.ipc;
import java.net.InetSocketAddress;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.ipc.api.IIPCHandle;
import org.apache.hyracks.ipc.exceptions.IPCException;
import org.apache.hyracks.ipc.impl.IPCSystem;
+import org.apache.logging.log4j.Logger;
public abstract class ControllerRemoteProxy {
protected final IPCSystem ipc;
@@ -55,14 +55,14 @@
try {
final boolean first = ipcHandle == null;
if (!first) {
- getLogger().warning("ipcHandle " + ipcHandle + " disconnected; retrying connection");
+ getLogger().warn("ipcHandle " + ipcHandle + " disconnected; retrying connection");
eventListener.ipcHandleDisconnected(ipcHandle);
}
ipcHandle = ipc.getHandle(inetSocketAddress, maxRetries);
if (first) {
eventListener.ipcHandleConnected(ipcHandle);
} else {
- getLogger().warning("ipcHandle " + ipcHandle + " restored");
+ getLogger().warn("ipcHandle " + ipcHandle + " restored");
eventListener.ipcHandleRestored(ipcHandle);
}
} catch (IPCException e) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
index 8431eca..b4aaf45 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
@@ -23,7 +23,6 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.NetworkAddress;
import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
@@ -50,9 +49,11 @@
import org.apache.hyracks.control.common.ipc.CCNCFunctions.UndeployJobSpecFunction;
import org.apache.hyracks.control.common.job.TaskAttemptDescriptor;
import org.apache.hyracks.ipc.impl.IPCSystem;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class NodeControllerRemoteProxy extends ControllerRemoteProxy implements INodeController {
- private static final Logger LOGGER = Logger.getLogger(NodeControllerRemoteProxy.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public NodeControllerRemoteProxy(IPCSystem ipc, InetSocketAddress inetSocketAddress) {
super(ipc, inetSocketAddress);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/utils/HyracksThreadFactory.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/utils/HyracksThreadFactory.java
index c9ef4d08..6c50b09 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/utils/HyracksThreadFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/utils/HyracksThreadFactory.java
@@ -21,14 +21,16 @@
import java.lang.Thread.UncaughtExceptionHandler;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class HyracksThreadFactory implements ThreadFactory {
private final String identifier;
private final AtomicInteger threadId = new AtomicInteger();
- private static final Logger LOGGER = Logger.getLogger(HyracksThreadFactory.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public HyracksThreadFactory(String identifier) {
this.identifier = identifier;
@@ -41,7 +43,7 @@
t.setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread t, Throwable e) {
- LOGGER.log(Level.SEVERE, "Uncaught exception by " + t.getName(), e);
+ LOGGER.log(Level.ERROR, "Uncaught exception by " + t.getName(), e);
}
});
return t;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/AbstractWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/AbstractWork.java
index 076dd66..b7f3332 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/AbstractWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/AbstractWork.java
@@ -18,9 +18,10 @@
*/
package org.apache.hyracks.control.common.work;
-import java.util.logging.Level;
+import org.apache.logging.log4j.Level;
public abstract class AbstractWork implements Runnable {
+
public Level logLevel() {
return Level.INFO;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/SynchronizableWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/SynchronizableWork.java
index f9952db..ad64fd2 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/SynchronizableWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/SynchronizableWork.java
@@ -18,10 +18,14 @@
*/
package org.apache.hyracks.control.common.work;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public abstract class SynchronizableWork extends AbstractWork {
+
+ private static final Logger LOGGER = LogManager.getLogger();
+
private boolean done;
private Exception e;
@@ -38,7 +42,7 @@
try {
doRun();
} catch (Exception ex) {
- Logger.getLogger(getClass().getName()).log(Level.INFO, "Exception thrown from work", ex);
+ LOGGER.log(Level.INFO, "Exception thrown from work", ex);
this.e = ex;
} finally {
synchronized (this) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java
index f1b00ab..dbcba99 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java
@@ -23,13 +23,14 @@
import java.lang.management.ThreadMXBean;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class WorkQueue {
- private static final Logger LOGGER = Logger.getLogger(WorkQueue.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
//to be fixed when application vs. hyracks log level issues are sorted
private static final boolean DEBUG = false;
@@ -80,10 +81,10 @@
public void schedule(AbstractWork event) {
if (DEBUG) {
- LOGGER.log(Level.FINEST, "Enqueue (" + hashCode() + "): " + enqueueCount.incrementAndGet());
+            LOGGER.log(Level.TRACE, "Enqueue (" + hashCode() + "): " + enqueueCount.incrementAndGet());
}
- if (LOGGER.isLoggable(Level.FINER)) {
- LOGGER.finer("Scheduling: " + event);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Scheduling: " + event);
}
queue.offer(event);
}
@@ -115,18 +116,18 @@
break;
}
if (DEBUG) {
- LOGGER.log(Level.FINEST,
+ LOGGER.log(Level.TRACE,
"Dequeue (" + WorkQueue.this.hashCode() + "): " + dequeueCount.incrementAndGet() + "/"
+ enqueueCount);
}
- if (LOGGER.isLoggable(r.logLevel())) {
+ if (LOGGER.isEnabled(r.logLevel())) {
LOGGER.log(r.logLevel(), "Executing: " + r);
}
ThreadInfo before = threadMXBean.getThreadInfo(thread.getId());
try {
r.run();
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Exception while executing " + r, e);
+ LOGGER.log(Level.WARN, "Exception while executing " + r, e);
} finally {
auditWaitsAndBlocks(r, before);
}
@@ -138,7 +139,7 @@
final long waitedDelta = after.getWaitedCount() - before.getWaitedCount();
final long blockedDelta = after.getBlockedCount() - before.getBlockedCount();
if (waitedDelta > 0 || blockedDelta > 0) {
- LOGGER.warning("Work " + r + " waited " + waitedDelta + " times (~"
+ LOGGER.warn("Work " + r + " waited " + waitedDelta + " times (~"
+ (after.getWaitedTime() - before.getWaitedTime()) + "ms), blocked " + blockedDelta
+ " times (~" + (after.getBlockedTime() - before.getBlockedTime()) + "ms)"
);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
index b5e96e3..594b701 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
@@ -83,5 +83,13 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-core</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/BaseNCApplication.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/BaseNCApplication.java
index 4d8cbbd..94e86dd 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/BaseNCApplication.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/BaseNCApplication.java
@@ -20,8 +20,6 @@
import java.lang.management.ManagementFactory;
import java.util.Arrays;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.application.INCApplication;
import org.apache.hyracks.api.application.IServiceContext;
@@ -33,6 +31,8 @@
import org.apache.hyracks.control.common.controllers.ControllerConfig;
import org.apache.hyracks.control.common.controllers.NCConfig;
import org.apache.hyracks.control.nc.io.DefaultDeviceResolver;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.core.config.Configurator;
public class BaseNCApplication implements INCApplication {
public static final BaseNCApplication INSTANCE = new BaseNCApplication();
@@ -97,7 +97,7 @@
}
protected void configureLoggingLevel(Level level) {
- Logger.getLogger("org.apache.hyracks").setLevel(level);
+ Configurator.setLevel("org.apache.hyracks", level);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
index ce666b0..8cb33ca 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
@@ -25,7 +25,6 @@
import java.util.Hashtable;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Logger;
import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.api.comm.IPartitionCollector;
@@ -60,9 +59,11 @@
import org.apache.hyracks.control.nc.io.WorkspaceFileFactory;
import org.apache.hyracks.control.nc.resources.DefaultDeallocatableRegistry;
import org.apache.hyracks.control.nc.resources.memory.FrameManager;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class Joblet implements IHyracksJobletContext, ICounterContext {
- private static final Logger LOGGER = Logger.getLogger(Joblet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final NodeControllerService nodeController;
@@ -221,7 +222,7 @@
public void close() {
long stillAllocated = memoryAllocation.get();
if (stillAllocated > 0) {
- LOGGER.warning("Freeing leaked " + stillAllocated + " bytes");
+ LOGGER.warn("Freeing leaked " + stillAllocated + " bytes");
serviceCtx.getMemoryManager().deallocate(stillAllocated);
}
nodeController.getExecutor().execute(new Runnable() {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
index 11df079..ec8cf27 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
@@ -20,18 +20,19 @@
import java.io.IOException;
import java.util.Arrays;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.application.INCApplication;
import org.apache.hyracks.control.common.config.ConfigManager;
import org.apache.hyracks.control.common.config.ConfigUtils;
import org.apache.hyracks.control.common.controllers.NCConfig;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.kohsuke.args4j.CmdLineException;
@SuppressWarnings("InfiniteLoopStatement")
public class NCDriver {
- private static final Logger LOGGER = Logger.getLogger(NCDriver.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private NCDriver() {
}
@@ -49,10 +50,10 @@
Thread.sleep(10000);
}
} catch (CmdLineException e) {
- LOGGER.log(Level.FINE, "Exception parsing command line: " + Arrays.toString(args), e);
+ LOGGER.log(Level.DEBUG, "Exception parsing command line: " + Arrays.toString(args), e);
System.exit(2);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Exiting NCDriver due to exception", e);
+            LOGGER.log(Level.ERROR, "Exiting NCDriver due to exception", e);
System.exit(1);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCShutdownHook.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCShutdownHook.java
index 6308373..020e564 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCShutdownHook.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCShutdownHook.java
@@ -18,10 +18,10 @@
*/
package org.apache.hyracks.control.nc;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.util.ThreadDumpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* Shutdown hook that invokes {@link NodeControllerService#stop() stop} method.
@@ -32,7 +32,7 @@
public static final int FAILED_TO_STARTUP_EXIT_CODE = 2;
public static final int FAILED_TO_RECOVER_EXIT_CODE = 3;
- private static final Logger LOGGER = Logger.getLogger(NCShutdownHook.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final long SHUTDOWN_WAIT_TIME = 10 * 60 * 1000L;
private final Thread watchDog;
private final NodeControllerService nodeControllerService;
@@ -73,7 +73,7 @@
LOGGER.log(Level.INFO, () -> "Thread dump at shutdown: " + ThreadDumpUtil.takeDumpString());
nodeControllerService.stop();
} catch (Throwable th) { // NOSONAR... This is fine since this is shutdown hook
- LOGGER.log(Level.WARNING, "Exception in executing shutdown hook", th);
+ LOGGER.log(Level.WARN, "Exception in executing shutdown hook", th);
}
}
}
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
index a3a9ac5..01e34c1 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
@@ -40,8 +40,6 @@
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
@@ -95,10 +93,13 @@
import org.apache.hyracks.util.PidHelper;
import org.apache.hyracks.util.trace.ITracer;
import org.apache.hyracks.util.trace.Tracer;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.kohsuke.args4j.CmdLineException;
public class NodeControllerService implements IControllerService {
- private static final Logger LOGGER = Logger.getLogger(NodeControllerService.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final double MEMORY_FUDGE_FACTOR = 0.8;
private static final long ONE_SECOND_NANOS = TimeUnit.SECONDS.toNanos(1);
@@ -196,7 +197,7 @@
throw new HyracksException("id not set");
}
lccm = new LifeCycleComponentManager();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Setting uncaught exception handler " + getLifeCycleComponentManager());
}
// Set shutdown hook before so it doesn't have the same uncaught exception handler
@@ -305,7 +306,7 @@
try {
registerNode();
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failed Registering with cc", e);
+ LOGGER.log(Level.WARN, "Failed Registering with cc", e);
throw new IPCException(e);
}
}
@@ -361,7 +362,7 @@
}
}
if (registrationException != null) {
- LOGGER.log(Level.WARNING, "Registering with Cluster Controller failed with exception",
+ LOGGER.log(Level.WARN, "Registering with Cluster Controller failed with exception",
registrationException);
throw registrationException;
}
@@ -390,7 +391,7 @@
application.preStop();
executor.shutdownNow();
if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
- LOGGER.log(Level.SEVERE, "Some jobs failed to exit, continuing with abnormal shutdown");
+ LOGGER.log(Level.ERROR, "Some jobs failed to exit, continuing with abnormal shutdown");
}
partitionManager.close();
datasetPartitionManager.close();
@@ -412,13 +413,13 @@
try {
ccs.notifyShutdown(id);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Exception notifying CC of shutdown", e);
+ LOGGER.log(Level.WARN, "Exception notifying CC of shutdown", e);
}
ipc.stop();
LOGGER.log(Level.INFO, "Stopped NodeControllerService");
} else {
- LOGGER.log(Level.SEVERE, "Duplicate shutdown call; original: " + Arrays.toString(shutdownCallStack),
+ LOGGER.log(Level.ERROR, "Duplicate shutdown call; original: " + Arrays.toString(shutdownCallStack),
new Exception("Duplicate shutdown call"));
}
}
@@ -540,7 +541,7 @@
if (delayNanos > 0) {
delayBlock.tryAcquire(delayNanos, TimeUnit.NANOSECONDS); //NOSONAR - ignore result of tryAcquire
} else {
- LOGGER.warning("After sending heartbeat, next one is already late by "
+ LOGGER.warn("After sending heartbeat, next one is already late by "
+ TimeUnit.NANOSECONDS.toMillis(-delayNanos) + "ms; sending without delay");
}
}
@@ -591,15 +592,15 @@
try {
cc.nodeHeartbeat(id, hbData);
- LOGGER.log(Level.FINE, "Successfully sent heartbeat");
+ LOGGER.log(Level.DEBUG, "Successfully sent heartbeat");
return true;
} catch (InterruptedException e) {
throw e;
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.log(Level.FINE, "Exception sending heartbeat; will retry after 1s", e);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.log(Level.DEBUG, "Exception sending heartbeat; will retry after 1s", e);
} else {
- LOGGER.log(Level.SEVERE, "Exception sending heartbeat; will retry after 1s: " + e.toString());
+ LOGGER.log(Level.ERROR, "Exception sending heartbeat; will retry after 1s: " + e.toString());
}
return false;
}
@@ -624,7 +625,7 @@
cc.reportProfile(id, profiles);
}
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Exception reporting profile", e);
+ LOGGER.log(Level.WARN, "Exception reporting profile", e);
}
}
}
@@ -644,7 +645,7 @@
try {
tracer.instant("CurrentTime", traceCategory, Tracer.Scope.p, Tracer.dateTimeStamp());
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Exception tracing current time", e);
+ LOGGER.log(Level.WARN, "Exception tracing current time", e);
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
index 94ee92b..34ddd6a 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
@@ -31,8 +31,6 @@
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Semaphore;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameReader;
import org.apache.hyracks.api.comm.IFrameWriter;
@@ -69,9 +67,12 @@
import org.apache.hyracks.control.nc.resources.DefaultDeallocatableRegistry;
import org.apache.hyracks.control.nc.work.NotifyTaskCompleteWork;
import org.apache.hyracks.control.nc.work.NotifyTaskFailureWork;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class Task implements IHyracksTaskContext, ICounterContext, Runnable {
- private static final Logger LOGGER = Logger.getLogger(Task.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final Joblet joblet;
@@ -355,9 +356,9 @@
removePendingThread(ct);
}
if (!exceptions.isEmpty()) {
- if (LOGGER.isLoggable(Level.WARNING)) {
+ if (LOGGER.isWarnEnabled()) {
for (int i = 0; i < exceptions.size(); i++) {
- LOGGER.log(Level.WARNING,
+ LOGGER.log(Level.WARN,
"Task " + taskAttemptId + " failed with exception"
+ (exceptions.size() > 1 ? "s (" + (i + 1) + "/" + exceptions.size() + ")" : ""),
exceptions.get(i));
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionManager.java
index 962d541..bc980e1 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionManager.java
@@ -22,7 +22,6 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executor;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -36,9 +35,11 @@
import org.apache.hyracks.control.nc.NodeControllerService;
import org.apache.hyracks.control.nc.io.WorkspaceFileFactory;
import org.apache.hyracks.control.nc.resources.DefaultDeallocatableRegistry;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class DatasetPartitionManager implements IDatasetPartitionManager {
- private static final Logger LOGGER = Logger.getLogger(DatasetPartitionManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final NodeControllerService ncs;
@@ -82,7 +83,7 @@
resultStates[partition] = dpw.getResultState();
}
- LOGGER.fine("Initialized partition writer: JobId: " + jobId + ":partition: " + partition);
+ LOGGER.debug("Initialized partition writer: JobId: " + jobId + ":partition: " + partition);
return dpw;
}
@@ -101,7 +102,7 @@
@Override
public void reportPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition) throws HyracksException {
try {
- LOGGER.fine("Reporting partition write completion: JobId: " + jobId + ": ResultSetId: " + rsId
+ LOGGER.debug("Reporting partition write completion: JobId: " + jobId + ": ResultSetId: " + rsId
+ ":partition: " + partition);
ncs.getClusterController().reportResultPartitionWriteCompletion(jobId, rsId, partition);
} catch (Exception e) {
@@ -115,7 +116,7 @@
ResultState resultState = getResultState(jobId, resultSetId, partition);
DatasetPartitionReader dpr = new DatasetPartitionReader(this, datasetMemoryManager, executor, resultState);
dpr.writeTo(writer);
- LOGGER.fine("Initialized partition reader: JobId: " + jobId + ":ResultSetId: " + resultSetId + ":partition: "
+ LOGGER.debug("Initialized partition reader: JobId: " + jobId + ":ResultSetId: " + resultSetId + ":partition: "
+ partition);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java
index 732ee68..ec33b05 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java
@@ -20,15 +20,15 @@
import java.nio.ByteBuffer;
import java.util.concurrent.Executor;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.comm.channels.NetworkOutputChannel;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class DatasetPartitionReader {
- private static final Logger LOGGER = Logger.getLogger(DatasetPartitionReader.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final DatasetPartitionManager datasetPartitionManager;
@@ -85,7 +85,7 @@
} catch (HyracksDataException e) {
throw new RuntimeException(e);
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("result reading successful(" + resultState.getResultSetPartitionId() + ")");
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionWriter.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionWriter.java
index b654d44..2bf5326 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionWriter.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.control.nc.dataset;
import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -31,9 +29,11 @@
import org.apache.hyracks.api.io.IWorkspaceFileFactory;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.partitions.ResultSetPartitionId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class DatasetPartitionWriter implements IFrameWriter {
- private static final Logger LOGGER = Logger.getLogger(DatasetPartitionWriter.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final IDatasetPartitionManager manager;
@@ -79,7 +79,7 @@
@Override
public void open() {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("open(" + partition + ")");
}
partitionRegistered = false;
@@ -105,7 +105,7 @@
@Override
public void close() throws HyracksDataException {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("close(" + partition + ")");
}
try {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultSetMap.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultSetMap.java
index 579f68b..3957401 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultSetMap.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultSetMap.java
@@ -21,16 +21,17 @@
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataset.IDatasetStateRecord;
import org.apache.hyracks.api.dataset.ResultSetId;
import org.apache.hyracks.api.job.JobId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
class ResultSetMap implements IDatasetStateRecord, Serializable {
private static final long serialVersionUID = 1L;
- private static final Logger LOGGER = Logger.getLogger(DatasetPartitionManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final long timestamp;
private final HashMap<ResultSetId, ResultState[]> resultStateMap;
@@ -70,7 +71,7 @@
final ResultState state = resultStates[partition];
if (state != null) {
state.closeAndDelete();
- LOGGER.fine("Removing partition: " + partition + " for JobId: " + jobId);
+ LOGGER.debug("Removing partition: " + partition + " for JobId: " + jobId);
}
resultStates[partition] = null;
boolean stateEmpty = true;
@@ -95,7 +96,7 @@
void closeAndDeleteAll() {
applyToAllStates((rsId, state, i) -> {
state.closeAndDelete();
- LOGGER.fine("Removing partition: " + i + " for result set " + rsId);
+ LOGGER.debug("Removing partition: " + i + " for result set " + rsId);
});
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/DatasetNetworkManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/DatasetNetworkManager.java
index 0b74806..5eba281 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/DatasetNetworkManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/DatasetNetworkManager.java
@@ -22,8 +22,6 @@
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IChannelInterfaceFactory;
import org.apache.hyracks.api.comm.ICloseableBufferAcceptor;
@@ -40,9 +38,11 @@
import org.apache.hyracks.net.protocols.muxdemux.MultiplexedConnection;
import org.apache.hyracks.net.protocols.muxdemux.MuxDemux;
import org.apache.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class DatasetNetworkManager implements IChannelConnectionFactory {
- private static final Logger LOGGER = Logger.getLogger(DatasetNetworkManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int MAX_CONNECTION_ATTEMPTS = 5;
@@ -137,8 +137,8 @@
JobId jobId = new JobId(buffer.getLong());
ResultSetId rsId = new ResultSetId(buffer.getLong());
int partition = buffer.getInt();
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Received initial dataset partition read request for JobId: " + jobId + " partition: "
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Received initial dataset partition read request for JobId: " + jobId + " partition: "
+ partition + " on channel: " + ccb);
}
noc = new NetworkOutputChannel(ccb, nBuffers);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/MessagingNetworkManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/MessagingNetworkManager.java
index 7983b93..a37d131 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/MessagingNetworkManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/MessagingNetworkManager.java
@@ -25,8 +25,6 @@
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.client.NodeControllerInfo;
import org.apache.hyracks.api.comm.IChannelControlBlock;
@@ -40,10 +38,12 @@
import org.apache.hyracks.net.protocols.muxdemux.MultiplexedConnection;
import org.apache.hyracks.net.protocols.muxdemux.MuxDemux;
import org.apache.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class MessagingNetworkManager {
- private static final Logger LOGGER = Logger.getLogger(MessagingNetworkManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int MAX_CONNECTION_ATTEMPTS = 5;
private final MuxDemux md;
private NetworkAddress localNetworkAddress;
@@ -193,8 +193,8 @@
@Override
public void accept(ByteBuffer buffer) {
String nodeId = readMessagingInitialMessage(buffer);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Opened messaging channel with node: " + nodeId);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Opened messaging channel with node: " + nodeId);
}
// Return the channel's original acceptor
ICloseableBufferAcceptor originalAcceptor;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/NetworkManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/NetworkManager.java
index 325966a..f3276a4 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/NetworkManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/net/NetworkManager.java
@@ -22,8 +22,6 @@
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IChannelInterfaceFactory;
import org.apache.hyracks.api.comm.ICloseableBufferAcceptor;
@@ -41,9 +39,11 @@
import org.apache.hyracks.net.protocols.muxdemux.MultiplexedConnection;
import org.apache.hyracks.net.protocols.muxdemux.MuxDemux;
import org.apache.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class NetworkManager implements IChannelConnectionFactory {
- private static final Logger LOGGER = Logger.getLogger(NetworkManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int MAX_CONNECTION_ATTEMPTS = 5;
@@ -125,8 +125,8 @@
@Override
public void accept(ByteBuffer buffer) {
PartitionId pid = readInitialMessage(buffer);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Received initial partition request: " + pid + " on channel: " + ccb);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Received initial partition request: " + pid + " on channel: " + ccb);
}
noc = new NetworkOutputChannel(ccb, nBuffers);
try {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
index 3b52dc9..b9d2f4d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
@@ -20,8 +20,6 @@
import java.nio.ByteBuffer;
import java.util.concurrent.Executor;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -32,9 +30,11 @@
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.api.partitions.PartitionId;
import org.apache.hyracks.control.common.job.PartitionState;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class MaterializedPartitionWriter implements IFrameWriter {
- private static final Logger LOGGER = Logger.getLogger(MaterializedPartitionWriter.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final IHyracksTaskContext ctx;
@@ -65,7 +65,7 @@
@Override
public void open() throws HyracksDataException {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("open(" + pid + " by " + taId);
}
failed = false;
@@ -89,7 +89,7 @@
@Override
public void close() throws HyracksDataException {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("close(" + pid + " by " + taId);
}
if (handle != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
index 3582da2..57eba53 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
@@ -20,8 +20,6 @@
import java.nio.ByteBuffer;
import java.util.concurrent.Executor;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -33,9 +31,12 @@
import org.apache.hyracks.api.partitions.IPartition;
import org.apache.hyracks.api.partitions.PartitionId;
import org.apache.hyracks.control.common.job.PartitionState;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class MaterializingPipelinedPartition implements IFrameWriter, IPartition {
- private static final Logger LOGGER = Logger.getLogger(MaterializingPipelinedPartition.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final IHyracksTaskContext ctx;
private final Executor executor;
@@ -50,7 +51,7 @@
private boolean failed;
protected boolean flushRequest;
private boolean deallocated;
- private Level openCloseLevel = Level.FINE;
+ private Level openCloseLevel = Level.DEBUG;
private Thread dataConsumerThread;
public MaterializingPipelinedPartition(IHyracksTaskContext ctx, PartitionManager manager, PartitionId pid,
@@ -164,7 +165,7 @@
}
}
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ LOGGER.log(Level.ERROR, e.getMessage(), e);
} finally {
thread.setName(oldName);
setDataConsumerThread(null); // Sets back the data consumer thread to null.
@@ -180,7 +181,7 @@
@Override
public void open() throws HyracksDataException {
- if (LOGGER.isLoggable(openCloseLevel)) {
+ if (LOGGER.isEnabled(openCloseLevel)) {
LOGGER.log(openCloseLevel, "open(" + pid + " by " + taId);
}
size = 0;
@@ -213,7 +214,7 @@
@Override
public void close() throws HyracksDataException {
- if (LOGGER.isLoggable(openCloseLevel)) {
+ if (LOGGER.isEnabled(openCloseLevel)) {
LOGGER.log(openCloseLevel, "close(" + pid + " by " + taId);
}
if (writeHandle != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/task/ThreadDumpTask.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/task/ThreadDumpTask.java
index e23aaaa..f43dcbc 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/task/ThreadDumpTask.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/task/ThreadDumpTask.java
@@ -18,14 +18,14 @@
*/
package org.apache.hyracks.control.nc.task;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.util.ThreadDumpUtil;
import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ThreadDumpTask implements Runnable {
- private static final Logger LOGGER = Logger.getLogger(ThreadDumpTask.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final NodeControllerService ncs;
private final String requestId;
@@ -40,14 +40,14 @@
try {
result = ThreadDumpUtil.takeDumpJSONString();
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Exception taking thread dump", e);
+ LOGGER.log(Level.WARN, "Exception taking thread dump", e);
result = null;
}
try {
ncs.getClusterController().notifyThreadDump(
ncs.getContext().getNodeId(), requestId, result);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Exception sending thread dump to CC", e);
+ LOGGER.log(Level.WARN, "Exception sending thread dump to CC", e);
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortAllJobsWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortAllJobsWork.java
index 56100da..6132639 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortAllJobsWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortAllJobsWork.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.control.nc.work;
import java.util.Collection;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataset.IDatasetPartitionManager;
import org.apache.hyracks.api.job.JobStatus;
@@ -28,10 +26,13 @@
import org.apache.hyracks.control.nc.Joblet;
import org.apache.hyracks.control.nc.NodeControllerService;
import org.apache.hyracks.control.nc.Task;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class AbortAllJobsWork extends SynchronizableWork {
- private static final Logger LOGGER = Logger.getLogger(AbortAllJobsWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final NodeControllerService ncs;
public AbortAllJobsWork(NodeControllerService ncs) {
@@ -40,14 +41,14 @@
@Override
protected void doRun() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Aborting all tasks");
}
IDatasetPartitionManager dpm = ncs.getDatasetPartitionManager();
if (dpm != null) {
ncs.getDatasetPartitionManager().abortAllReaders();
} else {
- LOGGER.log(Level.WARNING, "DatasetPartitionManager is null on " + ncs.getId());
+ LOGGER.log(Level.WARN, "DatasetPartitionManager is null on " + ncs.getId());
}
Collection<Joblet> joblets = ncs.getJobletMap().values();
for (Joblet ji : joblets) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortTasksWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortTasksWork.java
index 5870e76..80f3e98 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortTasksWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/AbortTasksWork.java
@@ -20,8 +20,6 @@
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.TaskAttemptId;
import org.apache.hyracks.api.dataset.IDatasetPartitionManager;
@@ -30,9 +28,12 @@
import org.apache.hyracks.control.nc.Joblet;
import org.apache.hyracks.control.nc.NodeControllerService;
import org.apache.hyracks.control.nc.Task;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class AbortTasksWork extends AbstractWork {
- private static final Logger LOGGER = Logger.getLogger(AbortTasksWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final NodeControllerService ncs;
@@ -48,7 +49,7 @@
@Override
public void run() {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Aborting Tasks: " + jobId + ":" + tasks);
}
IDatasetPartitionManager dpm = ncs.getDatasetPartitionManager();
@@ -65,7 +66,7 @@
}
}
} else {
- LOGGER.log(Level.WARNING,
+ LOGGER.log(Level.WARN,
"Joblet couldn't be found. Tasks of job " + jobId + " have all either completed or failed");
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ApplicationMessageWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ApplicationMessageWork.java
index 4f5b556..33d1d60 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ApplicationMessageWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ApplicationMessageWork.java
@@ -18,18 +18,18 @@
*/
package org.apache.hyracks.control.nc.work;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.api.deployment.DeploymentId;
import org.apache.hyracks.api.messages.IMessage;
import org.apache.hyracks.control.common.deployment.DeploymentUtils;
import org.apache.hyracks.control.common.work.AbstractWork;
import org.apache.hyracks.control.nc.NodeControllerService;
import org.apache.hyracks.control.nc.application.NCServiceContext;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ApplicationMessageWork extends AbstractWork {
- private static final Logger LOGGER = Logger.getLogger(ApplicationMessageWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private byte[] message;
private DeploymentId deploymentId;
private String nodeId;
@@ -50,10 +50,10 @@
if (ctx.getMessageBroker() != null) {
ctx.getMessageBroker().receivedMessage(data, nodeId);
} else {
- LOGGER.log(Level.WARNING, "Message was sent, but no Message Broker set!");
+ LOGGER.log(Level.WARN, "Message was sent, but no Message Broker set!");
}
} catch (Exception e) {
- Logger.getLogger(this.getClass().getName()).log(Level.WARNING, "Error in application message delivery!", e);
+ LOGGER.warn("Error in application message delivery!", e);
throw new RuntimeException(e);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/CleanupJobletWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/CleanupJobletWork.java
index 03ae90c..d38cd5e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/CleanupJobletWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/CleanupJobletWork.java
@@ -21,8 +21,6 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobStatus;
@@ -30,9 +28,12 @@
import org.apache.hyracks.control.common.work.AbstractWork;
import org.apache.hyracks.control.nc.Joblet;
import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class CleanupJobletWork extends AbstractWork {
- private static final Logger LOGGER = Logger.getLogger(CleanupJobletWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final NodeControllerService ncs;
@@ -48,7 +49,7 @@
@Override
public void run() {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Cleaning up after job: " + jobId);
}
ncs.removeJobParameterByteStore(jobId);
@@ -62,8 +63,8 @@
// Put deallocate in a try block to make sure that every IPartition is de-allocated.
p.deallocate();
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, e.getMessage(), e);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, e.getMessage(), e);
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskCompleteWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskCompleteWork.java
index 675926e..449d9a3 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskCompleteWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskCompleteWork.java
@@ -18,16 +18,16 @@
*/
package org.apache.hyracks.control.nc.work;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.control.common.job.profiling.om.TaskProfile;
import org.apache.hyracks.control.common.work.AbstractWork;
import org.apache.hyracks.control.nc.NodeControllerService;
import org.apache.hyracks.control.nc.Task;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class NotifyTaskCompleteWork extends AbstractWork {
- private static final Logger LOGGER = Logger.getLogger(NotifyTaskCompleteWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final NodeControllerService ncs;
private final Task task;
@@ -44,7 +44,7 @@
ncs.getClusterController().notifyTaskComplete(task.getJobletContext().getJobId(), task.getTaskAttemptId(),
ncs.getId(), taskProfile);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failed notifying task complete for " + task.getTaskAttemptId(), e);
+ LOGGER.log(Level.ERROR, "Failed notifying task complete for " + task.getTaskAttemptId(), e);
}
task.getJoblet().removeTask(task);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskFailureWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskFailureWork.java
index 7ed2c09..1d6ae1b 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskFailureWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NotifyTaskFailureWork.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.control.nc.work;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.TaskAttemptId;
import org.apache.hyracks.api.dataset.IDatasetPartitionManager;
@@ -28,9 +26,12 @@
import org.apache.hyracks.control.common.work.AbstractWork;
import org.apache.hyracks.control.nc.NodeControllerService;
import org.apache.hyracks.control.nc.Task;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class NotifyTaskFailureWork extends AbstractWork {
- private static final Logger LOGGER = Logger.getLogger(NotifyTaskFailureWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final NodeControllerService ncs;
private final Task task;
private final JobId jobId;
@@ -48,7 +49,7 @@
@Override
public void run() {
- LOGGER.log(Level.WARNING, ncs.getId() + " is sending a notification to cc that task " + taskId + " has failed",
+ LOGGER.log(Level.WARN, ncs.getId() + " is sending a notification to cc that task " + taskId + " has failed",
exceptions.get(0));
try {
IDatasetPartitionManager dpm = ncs.getDatasetPartitionManager();
@@ -57,7 +58,7 @@
}
ncs.getClusterController().notifyTaskFailure(jobId, taskId, ncs.getId(), exceptions);
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failure reporting task failure to cluster controller", e);
+ LOGGER.log(Level.ERROR, "Failure reporting task failure to cluster controller", e);
}
if (task != null) {
task.getJoblet().removeTask(task);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/StartTasksWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/StartTasksWork.java
index a2fcc25..f818c0d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/StartTasksWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/StartTasksWork.java
@@ -25,8 +25,6 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.api.comm.IFrameWriter;
@@ -70,9 +68,12 @@
import org.apache.hyracks.control.nc.partitions.PipelinedPartition;
import org.apache.hyracks.control.nc.partitions.ReceiveSideMaterializingCollector;
import org.apache.hyracks.control.nc.profiling.ProfilingPartitionWriterFactory;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class StartTasksWork extends AbstractWork {
- private static final Logger LOGGER = Logger.getLogger(StartTasksWork.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final NodeControllerService ncs;
@@ -138,7 +139,7 @@
ActivityId aid = tid.getActivityId();
ActivityCluster ac = acg.getActivityMap().get(aid);
IActivity han = ac.getActivityMap().get(aid);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Initializing " + taId + " -> " + han);
}
final int partition = tid.getPartition();
@@ -152,9 +153,7 @@
for (int i = 0; i < inputs.size(); ++i) {
IConnectorDescriptor conn = inputs.get(i);
IConnectorPolicy cPolicy = connectorPoliciesMap.get(conn.getConnectorId());
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("input: " + i + ": " + conn.getConnectorId());
- }
+ LOGGER.info("input: {}: {}", i, conn.getConnectorId());
RecordDescriptor recordDesc = ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
IPartitionCollector collector =
createPartitionCollector(td, partition, task, i, conn, recordDesc, cPolicy);
@@ -171,10 +170,7 @@
IPartitionWriterFactory pwFactory =
createPartitionWriterFactory(task, cPolicy, jobId, conn, partition, taId, flags);
-
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("output: " + i + ": " + conn.getConnectorId());
- }
+ LOGGER.info("output: {}: {}", i, conn.getConnectorId());
IFrameWriter writer = conn.createPartitioner(task, recordDesc, pwFactory, partition,
td.getPartitionCount(), td.getOutputPartitionCounts()[i]);
writer = enforce ? EnforceFrameWriter.enforce(writer) : writer;
@@ -188,7 +184,7 @@
taskIndex++;
}
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failure starting a task", e);
+ LOGGER.log(Level.WARN, "Failure starting a task", e);
// notify cc of start task failure
List<Exception> exceptions = new ArrayList<>();
exceptions.add(e);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
index c4ade2c..4473e5e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
@@ -66,6 +66,10 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCService.java
index c0e5678..b6d0b70 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCService.java
@@ -34,8 +34,6 @@
import java.util.Date;
import java.util.List;
import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.lang3.SystemUtils;
import org.apache.hyracks.control.common.config.ConfigUtils;
@@ -43,6 +41,9 @@
import org.apache.hyracks.api.config.Section;
import org.apache.hyracks.control.common.controllers.ServiceConstants;
import org.apache.hyracks.control.common.controllers.ServiceConstants.ServiceCommand;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.ini4j.Ini;
import org.kohsuke.args4j.CmdLineParser;
@@ -52,7 +53,7 @@
*/
public class NCService {
- private static final Logger LOGGER = Logger.getLogger(NCService.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
/**
* The .ini read from the CC (*not* the ncservice.ini file)
@@ -159,7 +160,7 @@
// QQQ inheriting probably isn't right
pb.inheritIO();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Launching NCDriver process");
}
@@ -178,7 +179,7 @@
writer.write("---------------------\n");
}
pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logfile));
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Logging to " + logfile.getCanonicalPath());
}
}
@@ -200,13 +201,13 @@
}
return retval == 0;
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
+ if (LOGGER.isErrorEnabled()) {
StringWriter sw = new StringWriter();
try {
ini.store(sw);
- LOGGER.log(Level.SEVERE, "Configuration from CC broken: \n" + sw.toString(), e);
+ LOGGER.log(Level.ERROR, "Configuration from CC broken: \n" + sw.toString(), e);
} catch (IOException e1) {
- LOGGER.log(Level.SEVERE, "Configuration from CC broken, failed to serialize", e1);
+ LOGGER.log(Level.ERROR, "Configuration from CC broken, failed to serialize", e1);
}
}
return false;
@@ -225,7 +226,7 @@
ObjectInputStream ois = new ObjectInputStream(is);
String magic = ois.readUTF();
if (!ServiceConstants.NC_SERVICE_MAGIC_COOKIE.equals(magic)) {
- LOGGER.severe("Connection used incorrect magic cookie");
+ LOGGER.error("Connection used incorrect magic cookie");
return false;
}
switch (ServiceCommand.valueOf(ois.readUTF())) {
@@ -241,7 +242,7 @@
break;
}
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Error decoding connection from server", e);
+ LOGGER.log(Level.ERROR, "Error decoding connection from server", e);
}
return false;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml
index f44ed74..5088719 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/pom.xml
@@ -79,6 +79,10 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameDeserializer.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameDeserializer.java
index 819d751..5a39523 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameDeserializer.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameDeserializer.java
@@ -21,16 +21,16 @@
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.FrameConstants;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class FrameDeserializer {
- private static final Logger LOGGER = Logger.getLogger(FrameDeserializer.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ByteBufferInputStream bbis;
@@ -70,8 +70,8 @@
Object[] record = new Object[recordDescriptor.getFieldCount()];
for (int i = 0; i < record.length; ++i) {
Object instance = recordDescriptor.getFields()[i].deserialize(di);
- if (LOGGER.isLoggable(Level.FINEST)) {
- LOGGER.finest(i + " " + instance);
+ if (LOGGER.isTraceEnabled()) {
+ LOGGER.trace(i + " " + instance);
}
record[i] = instance;
if (FrameConstants.DEBUG_FRAME_IO) {
@@ -84,8 +84,8 @@
}
}
}
- if (LOGGER.isLoggable(Level.FINEST)) {
- LOGGER.finest("Read Record tIndex = " + tIndex + ", tupleCount = " + tupleCount);
+ if (LOGGER.isTraceEnabled()) {
+ LOGGER.trace("Read Record tIndex = " + tIndex + ", tupleCount = " + tupleCount);
}
++tIndex;
return record;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameOutputStream.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameOutputStream.java
index e8f826f..d3af00d 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameOutputStream.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameOutputStream.java
@@ -18,16 +18,15 @@
*/
package org.apache.hyracks.dataflow.common.comm.io;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class FrameOutputStream extends ByteArrayAccessibleOutputStream {
- private static final Logger LOGGER = Logger.getLogger(FrameOutputStream.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final FrameTupleAppender frameTupleAppender;
@@ -42,15 +41,15 @@
public int getTupleCount() {
int tupleCount = frameTupleAppender.getTupleCount();
- if (LOGGER.isLoggable(Level.FINEST)) {
- LOGGER.finest("appendTuple(): tuple count: " + tupleCount);
+ if (LOGGER.isTraceEnabled()) {
+ LOGGER.trace("appendTuple(): tuple count: " + tupleCount);
}
return tupleCount;
}
public boolean appendTuple() throws HyracksDataException {
- if (LOGGER.isLoggable(Level.FINEST)) {
- LOGGER.finest("appendTuple(): tuple size: " + count);
+ if (LOGGER.isTraceEnabled()) {
+ LOGGER.trace("appendTuple(): tuple size: " + count);
}
boolean appended = frameTupleAppender.append(buf, 0, count);
count = 0;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/SerializingDataWriter.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/SerializingDataWriter.java
index d9a4c7c..f7b5e3b 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/SerializingDataWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/SerializingDataWriter.java
@@ -18,9 +18,6 @@
*/
package org.apache.hyracks.dataflow.common.comm.io;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.comm.VSizeFrame;
import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -28,9 +25,11 @@
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class SerializingDataWriter implements IOpenableDataWriter<Object[]> {
- private static final Logger LOGGER = Logger.getLogger(SerializingDataWriter.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ArrayTupleBuilder tb;
@@ -82,8 +81,8 @@
tb.reset();
for (int i = 0; i < data.length; ++i) {
Object instance = data[i];
- if (LOGGER.isLoggable(Level.FINEST)) {
- LOGGER.finest(i + " " + instance);
+ if (LOGGER.isTraceEnabled()) {
+ LOGGER.trace(i + " " + instance);
}
tb.addField(recordDescriptor.getFields()[i], instance);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
index a882c1c..fb16cba 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
@@ -90,5 +90,9 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/VariableDeletableTupleMemoryManager.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/VariableDeletableTupleMemoryManager.java
index 5a59b5d..6c67ecc 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/VariableDeletableTupleMemoryManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/VariableDeletableTupleMemoryManager.java
@@ -21,8 +21,6 @@
import java.nio.ByteBuffer;
import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
@@ -30,13 +28,15 @@
import org.apache.hyracks.dataflow.std.sort.util.DeletableFrameTupleAppender;
import org.apache.hyracks.dataflow.std.sort.util.IAppendDeletableFrameTupleAccessor;
import org.apache.hyracks.dataflow.std.structures.TuplePointer;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* Enable the delete record operation in the memory management. This is only used in the {@link org.apache.hyracks.dataflow.std.sort.HeapSortRunGenerator}
*/
public class VariableDeletableTupleMemoryManager implements IDeletableTupleBufferManager {
- private static final Logger LOG = Logger.getLogger(VariableDeletableTupleMemoryManager.class.getName());
+ private static final Logger LOG = LogManager.getLogger();
private final int minFreeSpace;
private final IFramePool pool;
@@ -161,8 +161,8 @@
policy.reset();
frames.clear();
numTuples = 0;
- if (LOG.isLoggable(Level.FINE)) {
- LOG.fine("VariableTupleMemoryManager has reorganized " + statsReOrg + " times");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("VariableTupleMemoryManager has reorganized " + statsReOrg + " times");
}
statsReOrg = 0;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/NonDeterministicChannelReader.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/NonDeterministicChannelReader.java
index d7d5c27..9676c9c 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/NonDeterministicChannelReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/NonDeterministicChannelReader.java
@@ -20,16 +20,16 @@
import java.nio.ByteBuffer;
import java.util.BitSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.channels.IInputChannel;
import org.apache.hyracks.api.channels.IInputChannelMonitor;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.partitions.PartitionId;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class NonDeterministicChannelReader implements IInputChannelMonitor, IPartitionAcceptor {
- private static final Logger LOGGER = Logger.getLogger(NonDeterministicChannelReader.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final int nSenderPartitions;
@@ -143,8 +143,8 @@
public synchronized void notifyFailure(IInputChannel channel) {
PartitionId pid = (PartitionId) channel.getAttachment();
int senderIndex = pid.getSenderIndex();
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Failure: " + pid.getConnectorDescriptorId() + " sender: " + senderIndex + " receiver: "
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Failure: " + pid.getConnectorDescriptorId() + " sender: " + senderIndex + " receiver: "
+ pid.getReceiverIndex());
}
failSenders.set(senderIndex);
@@ -156,8 +156,8 @@
public synchronized void notifyDataAvailability(IInputChannel channel, int nFrames) {
PartitionId pid = (PartitionId) channel.getAttachment();
int senderIndex = pid.getSenderIndex();
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Data available: " + pid.getConnectorDescriptorId() + " sender: " + senderIndex + " receiver: "
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Data available: " + pid.getConnectorDescriptorId() + " sender: " + senderIndex + " receiver: "
+ pid.getReceiverIndex());
}
availableFrameCounts[senderIndex] += nFrames;
@@ -169,8 +169,8 @@
public synchronized void notifyEndOfStream(IInputChannel channel) {
PartitionId pid = (PartitionId) channel.getAttachment();
int senderIndex = pid.getSenderIndex();
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("EOS: " + pid);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("EOS: " + pid);
}
eosSenders.set(senderIndex);
notifyAll();
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
index 43b9685..40f02f9 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
@@ -20,8 +20,6 @@
package org.apache.hyracks.dataflow.std.group;
import java.util.BitSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
import org.apache.hyracks.api.comm.IFrameWriter;
@@ -48,10 +46,12 @@
import org.apache.hyracks.dataflow.std.structures.SerializableHashTable;
import org.apache.hyracks.dataflow.std.structures.TuplePointer;
import org.apache.hyracks.dataflow.std.util.FrameTuplePairComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class HashSpillableTableFactory implements ISpillableTableFactory {
- private static Logger LOGGER = Logger.getLogger(HashSpillableTableFactory.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final double FUDGE_FACTOR = 1.1;
private static final long serialVersionUID = 1L;
private final IBinaryHashFunctionFamily[] hashFunctionFamilies;
@@ -109,8 +109,8 @@
final int numPartitions = getNumOfPartitions(inputDataBytesSize / ctx.getInitialFrameSize(), memoryBudget);
final int entriesPerPartition = (int) Math.ceil(1.0 * tableSize / numPartitions);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine(
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug(
"created hashtable, table size:" + tableSize + " file size:" + inputDataBytesSize + " #partitions:"
+ numPartitions);
}
@@ -159,8 +159,8 @@
if (hashTableForTuplePointer.isGarbageCollectionNeeded()) {
int numberOfFramesReclaimed = hashTableForTuplePointer.collectGarbage(bufferAccessor,
tpcIntermediate);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Garbage Collection on Hash table is done. Deallocated frames:"
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Garbage Collection on Hash table is done. Deallocated frames:"
+ numberOfFramesReclaimed);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
index d7b76ce..7e6e147 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.dataflow.std.group.external;
import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
@@ -35,11 +33,13 @@
import org.apache.hyracks.dataflow.std.group.IAggregatorDescriptorFactory;
import org.apache.hyracks.dataflow.std.group.ISpillableTable;
import org.apache.hyracks.dataflow.std.group.ISpillableTableFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ExternalGroupBuildOperatorNodePushable extends AbstractUnaryInputSinkOperatorNodePushable
implements IRunFileWriterGenerator {
- private static Logger LOGGER = Logger.getLogger("ExternalGroupBuildPhase");
+ private static final Logger LOGGER = LogManager.getLogger();
private final IHyracksTaskContext ctx;
private final Object stateId;
private final int[] keyFields;
@@ -115,7 +115,7 @@
} else {
externalGroupBy.flushSpilledPartitions();
ctx.setStateObject(state);
- if (LOGGER.isLoggable(Level.FINE)) {
+ if (LOGGER.isDebugEnabled()) {
int numOfPartition = state.getSpillableTable().getNumPartitions();
int numOfSpilledPart = 0;
for (int i = 0; i < numOfPartition; i++) {
@@ -123,7 +123,7 @@
numOfSpilledPart++;
}
}
- LOGGER.fine("level 0:" + "build with " + numOfPartition + " partitions" + ", spilled "
+ LOGGER.debug("level 0:" + "build with " + numOfPartition + " partitions" + ", spilled "
+ numOfSpilledPart + " partitions");
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
index 4d368bd..fb88775 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.dataflow.std.group.external;
import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.comm.VSizeFrame;
@@ -39,10 +37,12 @@
import org.apache.hyracks.dataflow.std.group.IAggregatorDescriptorFactory;
import org.apache.hyracks.dataflow.std.group.ISpillableTable;
import org.apache.hyracks.dataflow.std.group.ISpillableTableFactory;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ExternalGroupWriteOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable
implements IRunFileWriterGenerator {
- private static Logger LOGGER = Logger.getLogger("ExternalGroupbyWrite");
+ private static final Logger LOGGER = LogManager.getLogger();
private final IHyracksTaskContext ctx;
private final Object stateId;
private final ISpillableTableFactory spillableTableFactory;
@@ -138,14 +138,14 @@
}
}
- if (LOGGER.isLoggable(Level.FINE)) {
+ if (LOGGER.isDebugEnabled()) {
int numOfSpilledPart = 0;
for (int x = 0; x < numOfTuples.length; x++) {
if (numOfTuples[x] > 0) {
numOfSpilledPart++;
}
}
- LOGGER.fine("level " + level + ":" + "build with " + numOfTuples.length + " partitions"
+ LOGGER.debug("level " + level + ":" + "build with " + numOfTuples.length + " partitions"
+ ", spilled " + numOfSpilledPart + " partitions");
}
doPass(partitionTable, runFileWriters, sizeInTuplesNextLevel, writer, level + 1);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
index 7f34d13..a51b780 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
@@ -22,8 +22,6 @@
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
import org.apache.hyracks.api.comm.IFrameWriter;
@@ -43,6 +41,8 @@
import org.apache.hyracks.dataflow.std.structures.ISerializableTable;
import org.apache.hyracks.dataflow.std.structures.TuplePointer;
import org.apache.hyracks.dataflow.std.util.FrameTuplePairComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class InMemoryHashJoin {
@@ -64,7 +64,7 @@
ISimpleFrameBufferManager bufferManager;
private final boolean isTableCapacityNotZero;
- private static final Logger LOGGER = Logger.getLogger(InMemoryHashJoin.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe,
ITuplePartitionComputer tpcProbe, FrameTupleAccessor accessorBuild, RecordDescriptor rDBuild,
@@ -112,7 +112,7 @@
} else {
isTableCapacityNotZero = false;
}
- LOGGER.fine("InMemoryHashJoin has been created for a table size of " + table.getTableSize() + " for Thread ID "
+ LOGGER.debug("InMemoryHashJoin has been created for a table size of " + table.getTableSize() + " for Thread ID "
+ Thread.currentThread().getId() + ".");
}
@@ -207,8 +207,8 @@
}
}
buffers.clear();
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("InMemoryHashJoin has finished using " + nFrames + " frames for Thread ID "
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("InMemoryHashJoin has finished using " + nFrames + " frames for Thread ID "
+ Thread.currentThread().getId() + ".");
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
index c795144..8dbe9b0 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
@@ -23,8 +23,6 @@
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.BitSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.comm.VSizeFrame;
@@ -66,6 +64,8 @@
import org.apache.hyracks.dataflow.std.structures.ISerializableTable;
import org.apache.hyracks.dataflow.std.structures.SerializableHashTable;
import org.apache.hyracks.dataflow.std.util.FrameTuplePairComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* @author pouria
@@ -137,7 +137,7 @@
private boolean forceNLJ = false;
private boolean forceRoleReversal = false;
- private static final Logger LOGGER = Logger.getLogger(OptimizedHybridHashJoinOperatorDescriptor.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public OptimizedHybridHashJoinOperatorDescriptor(IOperatorDescriptorRegistry spec, int memSizeInFrames,
int inputsize0, double factor, int[] keys0, int[] keys1,
@@ -294,8 +294,8 @@
buildHpc, predEvaluator, isLeftOuter, nonMatchWriterFactories);
state.hybridHJ.initBuild();
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("OptimizedHybridHashJoin is starting the build phase with " + state.numOfPartitions
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("OptimizedHybridHashJoin is starting the build phase with " + state.numOfPartitions
+ " partitions using " + state.memForJoin + " frames for memory.");
}
}
@@ -312,8 +312,8 @@
state.hybridHJ.clearBuildTempFiles();
} else {
ctx.setStateObject(state);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("OptimizedHybridHashJoin closed its build phase");
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("OptimizedHybridHashJoin closed its build phase");
}
}
}
@@ -395,8 +395,8 @@
writer.open();
state.hybridHJ.initProbe();
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("OptimizedHybridHashJoin is starting the probe phase.");
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("OptimizedHybridHashJoin is starting the probe phase.");
}
}
@@ -466,8 +466,8 @@
}
private void logProbeComplete() {
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("OptimizedHybridHashJoin closed its probe phase");
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("OptimizedHybridHashJoin closed its probe phase");
}
}
@@ -484,8 +484,8 @@
long probePartSize = (long) Math.ceil((double) probeSideReader.getFileSize() / (double) frameSize);
int beforeMax = Math.max(buildSizeInTuple, probeSizeInTuple);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("\n>>>Joining Partition Pairs (thread_id " + Thread.currentThread().getId()
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("\n>>>Joining Partition Pairs (thread_id " + Thread.currentThread().getId()
+ ") (pid " + ") - (level " + level + ")" + " - BuildSize:\t" + buildPartSize
+ "\tProbeSize:\t" + probePartSize + " - MemForJoin " + (state.memForJoin)
+ " - LeftOuter is " + isLeftOuter);
@@ -505,8 +505,8 @@
int tabSize = -1;
if (!forceRoleReversal && (isLeftOuter || (buildPartSize < probePartSize))) {
//Case 1.1 - InMemHJ (without Role-Reversal)
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("\t>>>Case 1.1 (IsLeftOuter || buildSize<probe) AND ApplyInMemHJ - [Level "
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("\t>>>Case 1.1 (IsLeftOuter || buildSize<probe) AND ApplyInMemHJ - [Level "
+ level + "]");
}
tabSize = buildSizeInTuple;
@@ -518,8 +518,8 @@
applyInMemHashJoin(buildKeys, probeKeys, tabSize, buildRd, probeRd, buildHpc, probeHpc,
buildSideReader, probeSideReader); // checked-confirmed
} else { //Case 1.2 - InMemHJ with Role Reversal
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("\t>>>Case 1.2. (NoIsLeftOuter || probe<build) AND ApplyInMemHJ"
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("\t>>>Case 1.2. (NoIsLeftOuter || probe<build) AND ApplyInMemHJ"
+ "WITH RoleReversal - [Level " + level + "]");
}
tabSize = probeSizeInTuple;
@@ -534,22 +534,23 @@
}
//Apply (Recursive) HHJ
else {
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("\t>>>Case 2. ApplyRecursiveHHJ - [Level " + level + "]");
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("\t>>>Case 2. ApplyRecursiveHHJ - [Level " + level + "]");
}
if (!forceRoleReversal && (isLeftOuter || buildPartSize < probePartSize)) {
//Case 2.1 - Recursive HHJ (without Role-Reversal)
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("\t\t>>>Case 2.1 - RecursiveHHJ WITH (isLeftOuter || build<probe) - [Level "
- + level + "]");
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug(
+ "\t\t>>>Case 2.1 - RecursiveHHJ WITH (isLeftOuter || build<probe) - [Level "
+ + level + "]");
}
applyHybridHashJoin((int) buildPartSize, PROBE_REL, BUILD_REL, probeKeys, buildKeys,
probeRd, buildRd, probeHpc, buildHpc, probeSideReader, buildSideReader, level,
beforeMax);
} else { //Case 2.2 - Recursive HHJ (with Role-Reversal)
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine(
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug(
"\t\t>>>Case 2.2. - RecursiveHHJ WITH RoleReversal - [Level " + level + "]");
}
@@ -618,8 +619,8 @@
BitSet rPStatus = rHHj.getPartitionStatus();
if (!forceNLJ && (afterMax < (NLJ_SWITCH_THRESHOLD * beforeMax))) {
//Case 2.1.1 - Keep applying HHJ
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("\t\t>>>Case 2.1.1 - KEEP APPLYING RecursiveHHJ WITH "
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("\t\t>>>Case 2.1.1 - KEEP APPLYING RecursiveHHJ WITH "
+ "(isLeftOuter || build<probe) - [Level " + level + "]");
}
for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
@@ -644,8 +645,8 @@
}
} else { //Case 2.1.2 - Switch to NLJ
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine(
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug(
"\t\t>>>Case 2.1.2 - SWITCHED to NLJ RecursiveHHJ WITH "
+ "(isLeftOuter || build<probe) - [Level " + level + "]");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractExternalSortRunMerger.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractExternalSortRunMerger.java
index c8f9268..4a77b3c 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractExternalSortRunMerger.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractExternalSortRunMerger.java
@@ -22,8 +22,6 @@
import java.util.BitSet;
import java.util.LinkedList;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.FrameConstants;
import org.apache.hyracks.api.comm.IFrameWriter;
@@ -38,6 +36,9 @@
import org.apache.hyracks.dataflow.common.io.RunFileReader;
import org.apache.hyracks.dataflow.common.io.RunFileWriter;
import org.apache.hyracks.dataflow.std.sort.util.GroupVSizeFrame;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public abstract class AbstractExternalSortRunMerger {
@@ -55,7 +56,7 @@
private VSizeFrame outputFrame;
private ISorter sorter;
- private static final Logger LOGGER = Logger.getLogger(AbstractExternalSortRunMerger.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public AbstractExternalSortRunMerger(IHyracksTaskContext ctx, ISorter sorter, List<GeneratedRunFileReader> runs,
IBinaryComparator[] comparators, INormalizedKeyComputer nmkComputer, RecordDescriptor recordDesc,
@@ -147,8 +148,8 @@
if (currentGenerationRunAvailable.isEmpty()) {
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("generated runs:" + stop);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("generated runs:" + stop);
}
runs.subList(0, stop).clear();
currentGenerationRunAvailable.clear();
@@ -156,9 +157,7 @@
stop = runs.size();
}
} else {
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("final runs:" + stop);
- }
+ LOGGER.debug("final runs: {}", stop);
merge(finalWriter, partialRuns);
break;
}
@@ -179,8 +178,8 @@
try {
reader.close(); // close is idempotent.
} catch (Exception e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, e.getMessage(), e);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, e.getMessage(), e);
}
}
}
@@ -259,8 +258,8 @@
}
} finally {
merger.close();
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Output " + io + " frames");
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Output " + io + " frames");
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractFrameSorter.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractFrameSorter.java
index 6c061ae..eead09e 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractFrameSorter.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractFrameSorter.java
@@ -20,8 +20,6 @@
package org.apache.hyracks.dataflow.std.sort;
import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.comm.IFrameTupleAppender;
@@ -41,10 +39,12 @@
import org.apache.hyracks.dataflow.std.buffermanager.IFrameBufferManager;
import org.apache.hyracks.dataflow.std.buffermanager.VariableFramePool;
import org.apache.hyracks.util.IntSerDeUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public abstract class AbstractFrameSorter implements IFrameSorter {
- protected Logger LOGGER = Logger.getLogger(AbstractFrameSorter.class.getName());
+ protected Logger LOGGER = LogManager.getLogger();
protected static final int ID_FRAME_ID = 0;
protected static final int ID_TUPLE_START = 1;
protected static final int ID_TUPLE_END = 2;
@@ -233,8 +233,8 @@
}
maxFrameSize = Math.max(maxFrameSize, outputFrame.getFrameSize());
outputAppender.write(writer, true);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine(
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug(
"Flushed records:" + limit + " out of " + tupleCount + "; Flushed through " + (io + 1) + " frames");
}
return maxFrameSize;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java
index 602157f..f274ca1 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java
@@ -21,8 +21,6 @@
import java.nio.ByteBuffer;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -45,10 +43,12 @@
import org.apache.hyracks.dataflow.std.base.AbstractStateObject;
import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public abstract class AbstractSorterOperatorDescriptor extends AbstractOperatorDescriptor {
- private static final Logger LOGGER = Logger.getLogger(AbstractSorterOperatorDescriptor.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final long serialVersionUID = 1L;
@@ -132,7 +132,7 @@
runGen.close();
state.generatedRunFileReaders = runGen.getRuns();
state.sorter = runGen.getSorter();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("InitialNumberOfRuns:" + runGen.getRuns().size());
}
ctx.setStateObject(state);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
index 80b36ce..180ecbc 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
@@ -20,8 +20,6 @@
package org.apache.hyracks.dataflow.std.sort;
import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -34,9 +32,11 @@
import org.apache.hyracks.dataflow.std.buffermanager.FrameFreeSlotPolicyFactory;
import org.apache.hyracks.dataflow.std.buffermanager.VariableFrameMemoryManager;
import org.apache.hyracks.dataflow.std.buffermanager.VariableFramePool;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class HybridTopKSortRunGenerator extends HeapSortRunGenerator {
- private static final Logger LOG = Logger.getLogger(HybridTopKSortRunGenerator.class.getName());
+ private static final Logger LOG = LogManager.getLogger();
private static final int SWITCH_TO_FRAME_SORTER_THRESHOLD = 2;
private IFrameSorter frameSorter = null;
@@ -90,8 +90,8 @@
}
tupleSorter.close();
tupleSorter = null;
- if (LOG.isLoggable(Level.FINE)) {
- LOG.fine("clear tupleSorter");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("clear tupleSorter");
}
}
}
@@ -103,8 +103,8 @@
frameLimit - 1));
frameSorter = new FrameSorterMergeSort(ctx, bufferManager, frameLimit - 1, sortFields, nmkFactories,
comparatorFactories, recordDescriptor, topK);
- if (LOG.isLoggable(Level.FINE)) {
- LOG.fine("create frameSorter");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("create frameSorter");
}
}
if (!frameSorter.insertFrame(buffer)) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TupleSorterHeapSort.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TupleSorterHeapSort.java
index 980857a..b02f859 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TupleSorterHeapSort.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TupleSorterHeapSort.java
@@ -21,8 +21,6 @@
import java.util.Arrays;
import java.util.Comparator;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
@@ -42,10 +40,12 @@
import org.apache.hyracks.dataflow.std.structures.IResetableComparableFactory;
import org.apache.hyracks.dataflow.std.structures.MaxHeap;
import org.apache.hyracks.dataflow.std.structures.TuplePointer;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class TupleSorterHeapSort implements ITupleSorter {
- private static final Logger LOGGER = Logger.getLogger(TupleSorterHeapSort.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
class HeapEntryFactory implements IResetableComparableFactory<HeapEntry> {
@Override
@@ -304,7 +304,7 @@
}
maxFrameSize = Math.max(maxFrameSize, outputFrame.getFrameSize());
outputAppender.write(writer, true);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Flushed records:" + numEntries + "; Flushed through " + (io + 1) + " frames");
}
return maxFrameSize;
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
index d894d9b..7472aa8 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
@@ -179,5 +179,9 @@
<artifactId>hyracks-util</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
index 8696f8b..7a675bc 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
@@ -24,8 +24,6 @@
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.comm.IFrameReader;
@@ -44,10 +42,12 @@
import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import org.apache.hyracks.test.support.TestUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.Test;
public class SerializationDeserializationTest {
- private static final Logger LOGGER = Logger.getLogger(SerializationDeserializationTest.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final String DBLP_FILE = "data" + File.separator + "device1" + File.separator + "data"
+ File.separator + "dblp.txt";
@@ -139,7 +139,7 @@
reader.open();
Object[] arr;
while ((arr = reader.readData()) != null) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(arr[0] + " " + arr[1]);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
index 82fd737..ae27ac9 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
@@ -30,8 +30,6 @@
import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.io.FileUtils;
import org.apache.hyracks.api.client.HyracksConnection;
@@ -55,12 +53,14 @@
import org.apache.hyracks.control.nc.resources.memory.FrameManager;
import org.apache.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
import org.apache.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
public abstract class AbstractIntegrationTest {
- private static final Logger LOGGER = Logger.getLogger(AbstractIntegrationTest.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final String NC1_ID = "nc1";
public static final String NC2_ID = "nc2";
@@ -121,7 +121,7 @@
nc2.start();
hcc = new HyracksConnection(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort());
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
}
}
@@ -134,11 +134,11 @@
}
protected JobId executeTest(JobSpecification spec) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(spec.toJSON().asText());
}
JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(jobId.toString());
}
return jobId;
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
index cc46a7d..a455cc9 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
@@ -23,8 +23,6 @@
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.io.FileUtils;
import org.apache.hyracks.api.client.HyracksConnection;
@@ -50,6 +48,8 @@
import org.apache.hyracks.control.nc.resources.memory.FrameManager;
import org.apache.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
import org.apache.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -58,7 +58,7 @@
public abstract class AbstractMultiNCIntegrationTest {
- private static final Logger LOGGER = Logger.getLogger(AbstractMultiNCIntegrationTest.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final TestJobLifecycleListener jobLifecycleListener = new TestJobLifecycleListener();
public static final String[] ASTERIX_IDS =
@@ -113,7 +113,7 @@
}
hcc = new HyracksConnection(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort());
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
}
}
@@ -144,11 +144,11 @@
}
protected void runTest(JobSpecification spec, String expectedErrorMessage) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(spec.toJSON().asText());
}
JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(jobId.toString());
}
@@ -218,7 +218,7 @@
}
private void dumpOutputFiles() {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
for (File f : outputFiles) {
if (f.exists() && f.isFile()) {
try {
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java
index dd4fdd1..553c5b5 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java
@@ -25,8 +25,6 @@
import java.io.File;
import java.lang.reflect.Field;
import java.util.HashMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.io.FileUtils;
import org.apache.hyracks.api.client.HyracksConnection;
@@ -40,6 +38,8 @@
import org.apache.hyracks.control.common.controllers.CCConfig;
import org.apache.hyracks.control.common.controllers.NCConfig;
import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
@@ -47,7 +47,7 @@
import org.mockito.Mockito;
public class DeployedJobSpecsTest {
- private static final Logger LOGGER = Logger.getLogger(DeployedJobSpecsTest.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final String NC1_ID = "nc1";
private static final String NC2_ID = "nc2";
@@ -111,7 +111,7 @@
nc2.start();
hcc = new HyracksConnection(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort());
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TestJobLifecycleListener.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TestJobLifecycleListener.java
index c8d0b9c..008be29 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TestJobLifecycleListener.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TestJobLifecycleListener.java
@@ -24,18 +24,19 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.api.job.IJobLifecycleListener;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.api.job.JobStatus;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class TestJobLifecycleListener implements IJobLifecycleListener {
- private static final Logger LOGGER = Logger.getLogger(TestJobLifecycleListener.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final Map<JobId, JobSpecification> created = new HashMap<>();
private final Set<JobId> started = new HashSet<>();
private final Set<JobId> finished = new HashSet<>();
@@ -48,7 +49,7 @@
@Override
public void notifyJobCreation(JobId jobId, JobSpecification spec) throws HyracksException {
if (created.containsKey(jobId)) {
- LOGGER.log(Level.WARNING, "Job " + jobId + "has been created before");
+ LOGGER.log(Level.WARN, "Job " + jobId + " has been created before");
increment(doubleCreated, jobId);
}
created.put(jobId, spec);
@@ -63,11 +64,11 @@
@Override
public void notifyJobStart(JobId jobId) throws HyracksException {
if (!created.containsKey(jobId)) {
- LOGGER.log(Level.WARNING, "Job " + jobId + "has not been created");
+ LOGGER.log(Level.WARN, "Job " + jobId + " has not been created");
startWithoutCreate.add(jobId);
}
if (started.contains(jobId)) {
- LOGGER.log(Level.WARNING, "Job " + jobId + "has been started before");
+ LOGGER.log(Level.WARN, "Job " + jobId + " has been started before");
increment(doubleStarted, jobId);
}
started.add(jobId);
@@ -76,43 +77,43 @@
@Override
public void notifyJobFinish(JobId jobId, JobStatus jobStatus, List<Exception> exceptions) throws HyracksException {
if (!started.contains(jobId)) {
- LOGGER.log(Level.WARNING, "Job " + jobId + "has not been started");
+ LOGGER.log(Level.WARN, "Job " + jobId + " has not been started");
finishWithoutStart.add(jobId);
}
if (finished.contains(jobId)) {
// TODO: job finish should be called once only when it has really completed
// throw new HyracksDataException("Job " + jobId + "has been finished before");
- LOGGER.log(Level.WARNING, "Dangerous: Duplicate Job: " + jobId + " has finished with status: " + jobStatus);
+ LOGGER.log(Level.WARN, "Dangerous: Duplicate Job: " + jobId + " has finished with status: " + jobStatus);
increment(doubleFinished, jobId);
}
finished.add(jobId);
}
public void check() throws Exception {
- LOGGER.log(Level.WARNING, "Checking all created jobs have started");
+ LOGGER.log(Level.WARN, "Checking all created jobs have started");
for (JobId jobId : created.keySet()) {
if (!started.contains(jobId)) {
- LOGGER.log(Level.WARNING, "JobId " + jobId + " has been created but never started");
+ LOGGER.log(Level.WARN, "JobId " + jobId + " has been created but never started");
}
}
- LOGGER.log(Level.WARNING, "Checking all started jobs have terminated");
+ LOGGER.log(Level.WARN, "Checking all started jobs have terminated");
for (JobId jobId : started) {
if (!finished.contains(jobId)) {
- LOGGER.log(Level.WARNING, "JobId " + jobId + " has started but not finished");
+ LOGGER.log(Level.WARN, "JobId " + jobId + " has started but not finished");
}
}
- LOGGER.log(Level.WARNING, "Checking multiple creates");
+ LOGGER.log(Level.WARN, "Checking multiple creates");
for (Entry<JobId, Integer> entry : doubleCreated.entrySet()) {
- LOGGER.log(Level.WARNING, "job " + entry.getKey() + " has been created " + entry.getValue() + " times");
+ LOGGER.log(Level.WARN, "job " + entry.getKey() + " has been created " + entry.getValue() + " times");
}
- LOGGER.log(Level.WARNING, "Checking multiple starts");
+ LOGGER.log(Level.WARN, "Checking multiple starts");
for (Entry<JobId, Integer> entry : doubleStarted.entrySet()) {
- LOGGER.log(Level.WARNING, "job " + entry.getKey() + " has been started " + entry.getValue() + " times");
+ LOGGER.log(Level.WARN, "job " + entry.getKey() + " has been started " + entry.getValue() + " times");
}
- LOGGER.log(Level.WARNING, "Checking multiple finishes");
+ LOGGER.log(Level.WARN, "Checking multiple finishes");
for (Entry<JobId, Integer> entry : doubleFinished.entrySet()) {
- LOGGER.log(Level.WARNING, "job " + entry.getKey() + " has been finished " + entry.getValue() + " times");
+ LOGGER.log(Level.WARN, "job " + entry.getKey() + " has been finished " + entry.getValue() + " times");
}
- LOGGER.log(Level.WARNING, "Done checking!");
+ LOGGER.log(Level.WARN, "Done checking!");
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/WaitingOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/WaitingOperatorDescriptor.java
index 6503b7b..799520c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/WaitingOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/WaitingOperatorDescriptor.java
@@ -18,8 +18,6 @@
*/
package org.apache.hyracks.tests.integration;
-import java.util.logging.Logger;
-
import org.apache.commons.lang3.mutable.MutableBoolean;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -35,7 +33,6 @@
public static final MutableBoolean CONTINUE_RUNNING = new MutableBoolean(false);
private static final long serialVersionUID = 1L;
- private static Logger LOGGER = Logger.getLogger(WaitingOperatorDescriptor.class.getName());
public WaitingOperatorDescriptor(IOperatorDescriptorRegistry spec, int inputArity, int outputArity) {
super(spec, inputArity, outputArity);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/ExceptionOnCreatePushRuntimeOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/ExceptionOnCreatePushRuntimeOperatorDescriptor.java
index d704671..10d6947 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/ExceptionOnCreatePushRuntimeOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/ExceptionOnCreatePushRuntimeOperatorDescriptor.java
@@ -20,8 +20,6 @@
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -31,10 +29,13 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ExceptionOnCreatePushRuntimeOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
private static final long serialVersionUID = 1L;
- private static Logger LOGGER = Logger.getLogger(ExceptionOnCreatePushRuntimeOperatorDescriptor.class.getName());
+ private static Logger LOGGER = LogManager.getLogger();
private static AtomicInteger createPushRuntime = new AtomicInteger();
private static AtomicInteger initializeCounter = new AtomicInteger();
private static AtomicInteger openCloseCounter = new AtomicInteger();
@@ -130,10 +131,10 @@
public static boolean succeed() {
boolean success = openCloseCounter.get() == 0 && createPushRuntime.get() == 0 && initializeCounter.get() == 0;
if (!success) {
- LOGGER.log(Level.SEVERE, "Failure:");
- LOGGER.log(Level.SEVERE, "CreatePushRuntime:" + createPushRuntime.get());
- LOGGER.log(Level.SEVERE, "InitializeCounter:" + initializeCounter.get());
- LOGGER.log(Level.SEVERE, "OpenCloseCounter:" + openCloseCounter.get());
+ LOGGER.log(Level.ERROR, "Failure:");
+ LOGGER.log(Level.ERROR, "CreatePushRuntime:" + createPushRuntime.get());
+ LOGGER.log(Level.ERROR, "InitializeCounter:" + initializeCounter.get());
+ LOGGER.log(Level.ERROR, "OpenCloseCounter:" + openCloseCounter.get());
}
return success;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
index 478650f..1514c8e 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
@@ -209,5 +209,9 @@
<artifactId>hyracks-control-cc</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java
index c1083c9f..6f4d8b1 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java
@@ -19,8 +19,9 @@
package org.apache.hyracks.examples.shutdown.test;
import java.net.ServerSocket;
-import java.util.logging.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
@@ -30,7 +31,7 @@
import org.apache.hyracks.ipc.exceptions.IPCException;
public class ClusterShutdownIT {
- private static Logger LOGGER = Logger.getLogger(ClusterShutdownIT.class.getName());
+ private static Logger LOGGER = LogManager.getLogger();
@Rule
public ExpectedException closeTwice = ExpectedException.none();
@Test
@@ -49,7 +50,7 @@
s = new ServerSocket(1099);
//and we should be able to bind to this too
} catch (Exception e) {
- LOGGER.severe(e.getMessage());
+ LOGGER.error(e.getMessage());
throw e;
} finally {
s.close();
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
index 678645c..e1c91c1 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
@@ -410,5 +410,9 @@
<version>${project.version}</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
index 4937a15..c9bf547 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
@@ -25,7 +25,6 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
-import java.util.logging.Logger;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.InputSplit;
@@ -33,10 +32,12 @@
import org.apache.hyracks.api.topology.ClusterTopology;
import org.apache.hyracks.hdfs.api.INcCollection;
import org.apache.hyracks.hdfs.api.INcCollectionBuilder;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
@SuppressWarnings("deprecation")
public class RackAwareNcCollectionBuilder implements INcCollectionBuilder {
- private static final Logger LOGGER = Logger.getLogger(RackAwareNcCollectionBuilder.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private ClusterTopology topology;
public RackAwareNcCollectionBuilder(ClusterTopology topology) {
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
index f9b68bc..615f827 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
@@ -30,7 +30,6 @@
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Random;
-import java.util.logging.Logger;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.InputSplit;
@@ -42,13 +41,15 @@
import org.apache.hyracks.api.topology.ClusterTopology;
import org.apache.hyracks.hdfs.api.INcCollection;
import org.apache.hyracks.hdfs.api.INcCollectionBuilder;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* The scheduler conduct data-local scheduling for data reading on HDFS. This
* class works for Hadoop old API.
*/
public class Scheduler {
- private static final Logger LOGGER = Logger.getLogger(Scheduler.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
/** a list of NCs */
private String[] NCs;
diff --git a/hyracks-fullstack/hyracks/hyracks-http/pom.xml b/hyracks-fullstack/hyracks/hyracks-http/pom.xml
index 7d0ddb2..099c429 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-http/pom.xml
@@ -75,5 +75,9 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/AbstractServlet.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/AbstractServlet.java
index d0637ca..21653c8 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/AbstractServlet.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/AbstractServlet.java
@@ -24,13 +24,14 @@
import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.http.api.IServlet;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
@@ -40,7 +41,7 @@
import io.netty.handler.codec.http.HttpResponseStatus;
public abstract class AbstractServlet implements IServlet {
- private static final Logger LOGGER = Logger.getLogger(AbstractServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
static {
@@ -99,11 +100,11 @@
notAllowed(method, response);
}
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Unhandled exception", e);
+ LOGGER.log(Level.WARN, "Unhandled exception", e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
} catch (Throwable th) { //NOSONAR Just logging and then throwing again
try {
- LOGGER.log(Level.WARNING, "Unhandled throwable", th);
+ LOGGER.log(Level.WARN, "Unhandled throwable", th);
} catch (Throwable loggingFailure) {// NOSONAR... swallow logging failure
}
throw th;
@@ -116,7 +117,7 @@
if (message != null) {
response.writer().println(message);
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("sendError: status=" + status + ", message=" + message);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedNettyOutputStream.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedNettyOutputStream.java
index e4f0777..d5f81e5 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedNettyOutputStream.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedNettyOutputStream.java
@@ -20,8 +20,10 @@
import java.io.IOException;
import java.io.OutputStream;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
@@ -31,7 +33,7 @@
public class ChunkedNettyOutputStream extends OutputStream {
- private static final Logger LOGGER = Logger.getLogger(ChunkedNettyOutputStream.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ChannelHandlerContext ctx;
private final ChunkedResponse response;
private ByteBuf buffer;
@@ -132,7 +134,7 @@
wait();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
- LOGGER.log(Level.WARNING, "Interupted while waiting for channel to be writable", e);
+ LOGGER.log(Level.WARN, "Interrupted while waiting for channel to be writable", e);
throw new IOException(e);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedResponse.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedResponse.java
index d8e9a9a..323a463 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedResponse.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/ChunkedResponse.java
@@ -21,10 +21,11 @@
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.http.api.IServletResponse;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelFuture;
@@ -60,7 +61,7 @@
*/
public class ChunkedResponse implements IServletResponse {
- private static final Logger LOGGER = Logger.getLogger(ChunkedResponse.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ChannelHandlerContext ctx;
private final ChunkedNettyOutputStream outputStream;
private final PrintWriter writer;
@@ -112,7 +113,7 @@
} else {
// There was an error
if (headerSent) {
- LOGGER.log(Level.WARNING, "Error after header write of chunked response");
+ LOGGER.log(Level.WARN, "Error after header write of chunked response");
if (error != null) {
error.release();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java
index 13ed1fc..cb6ad0d 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java
@@ -18,8 +18,9 @@
*/
package org.apache.hyracks.http.server;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
@@ -40,7 +41,7 @@
*/
public class HttpRequestCapacityController extends ChannelInboundHandlerAdapter {
- private static final Logger LOGGER = Logger.getLogger(HttpRequestCapacityController.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final HttpServer server;
private boolean overloaded = false;
@@ -74,7 +75,7 @@
ctx.writeAndFlush(ctx.alloc().buffer(0), promise);
} catch (Throwable th) {//NOSONAR
try {
- LOGGER.log(Level.SEVERE, "Failure during request rejection", th);
+ LOGGER.log(Level.ERROR, "Failure during request rejection", th);
} catch (Throwable loggingFailure) {//NOSONAR
}
PromiseNotificationUtil.tryFailure(promise, th, null);
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestHandler.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestHandler.java
index 555f845..bf8e629 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestHandler.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestHandler.java
@@ -20,12 +20,13 @@
import java.io.IOException;
import java.util.concurrent.Callable;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.http.api.IServlet;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
@@ -34,7 +35,7 @@
import io.netty.handler.codec.http.HttpUtil;
public class HttpRequestHandler implements Callable<Void> {
- private static final Logger LOGGER = Logger.getLogger(HttpRequestHandler.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ChannelHandlerContext ctx;
private final IServlet servlet;
private final IServletRequest request;
@@ -57,7 +58,7 @@
lastContentFuture.addListener(ChannelFutureListener.CLOSE);
}
} catch (Throwable th) { //NOSONAR
- LOGGER.log(Level.SEVERE, "Failure handling HTTP Request", th);
+ LOGGER.log(Level.ERROR, "Failure handling HTTP Request", th);
ctx.close();
} finally {
request.getHttpRequest().release();
@@ -69,7 +70,7 @@
try {
servlet.handle(request, response);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failure during handling of an IServletRequest", e);
+ LOGGER.log(Level.WARN, "Failure during handling of an IServletRequest", e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
} finally {
response.close();
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServer.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServer.java
index 71ddbc0..19436ab 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServer.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServer.java
@@ -27,11 +27,12 @@
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.http.api.IServlet;
import org.apache.hyracks.util.ThreadDumpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.PooledByteBufAllocator;
@@ -54,7 +55,7 @@
protected static final int RECEIVE_BUFFER_SIZE = 4096;
protected static final int DEFAULT_NUM_EXECUTOR_THREADS = 16;
protected static final int DEFAULT_REQUEST_QUEUE_SIZE = 256;
- private static final Logger LOGGER = Logger.getLogger(HttpServer.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int FAILED = -1;
private static final int STOPPED = 0;
private static final int STARTING = 1;
@@ -111,7 +112,7 @@
doStart();
setStarted();
} catch (Throwable e) { // NOSONAR
- LOGGER.log(Level.SEVERE, "Failure starting an Http Server with port: " + port, e);
+ LOGGER.log(Level.ERROR, "Failure starting an Http Server with port: " + port, e);
setFailed(e);
throw e;
}
@@ -128,7 +129,7 @@
doStop();
setStopped();
} catch (Throwable e) { // NOSONAR
- LOGGER.log(Level.SEVERE, "Failure stopping an Http Server", e);
+ LOGGER.log(Level.ERROR, "Failure stopping an Http Server", e);
setFailed(e);
throw e;
}
@@ -229,15 +230,15 @@
// wait 30s for interrupted requests to unwind
executor.awaitTermination(30, TimeUnit.SECONDS);
if (!executor.isTerminated()) {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.log(Level.SEVERE,
+ if (LOGGER.isInfoEnabled()) {
+ LOGGER.log(Level.ERROR,
"Failed to shutdown http server executor; thread dump: " + ThreadDumpUtil.takeDumpString());
} else {
- LOGGER.log(Level.SEVERE, "Failed to shutdown http server executor");
+ LOGGER.log(Level.ERROR, "Failed to shutdown http server executor");
}
}
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Error while shutting down http server executor", e);
+ LOGGER.log(Level.ERROR, "Error while shutting down http server executor", e);
}
channel.close();
channel.closeFuture().sync();
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServerHandler.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServerHandler.java
index 0984ebf..9290cdf 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServerHandler.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpServerHandler.java
@@ -20,12 +20,13 @@
import java.io.IOException;
import java.util.concurrent.RejectedExecutionException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.http.api.IServlet;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
@@ -37,7 +38,7 @@
public class HttpServerHandler<T extends HttpServer> extends SimpleChannelInboundHandler<Object> {
- private static final Logger LOGGER = Logger.getLogger(HttpServerHandler.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected final T server;
protected final int chunkSize;
protected HttpRequestHandler handler;
@@ -71,7 +72,7 @@
submit(ctx, servlet, request);
}
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "Failure Submitting HTTP Request", e);
+ LOGGER.log(Level.ERROR, "Failure Submitting HTTP Request", e);
respond(ctx, request.protocolVersion(), new HttpResponseStatus(500, e.getMessage()));
}
}
@@ -86,7 +87,7 @@
try {
servletRequest = HttpUtil.toServletRequest(request);
} catch (IllegalArgumentException e) {
- LOGGER.log(Level.WARNING, "Failure Decoding Request", e);
+ LOGGER.log(Level.WARN, "Failure Decoding Request", e);
respond(ctx, request.protocolVersion(), HttpResponseStatus.BAD_REQUEST);
return;
}
@@ -98,21 +99,21 @@
try {
server.getExecutor(handler).submit(handler);
} catch (RejectedExecutionException e) { // NOSONAR
- LOGGER.log(Level.WARNING, "Request rejected by server executor service. " + e.getMessage());
+ LOGGER.log(Level.WARN, "Request rejected by server executor service. " + e.getMessage());
handler.reject();
}
}
protected void handleServletNotFound(ChannelHandlerContext ctx, FullHttpRequest request) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("No servlet for " + request.uri());
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("No servlet for " + request.uri());
}
respond(ctx, request.protocolVersion(), HttpResponseStatus.NOT_FOUND);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
- LOGGER.log(Level.SEVERE, "Failure handling HTTP Request", cause);
+ LOGGER.log(Level.ERROR, "Failure handling HTTP Request", cause);
ctx.close();
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/StaticResourceServlet.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/StaticResourceServlet.java
index b21e533..e2b3237 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/StaticResourceServlet.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/StaticResourceServlet.java
@@ -22,19 +22,20 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.io.IOUtils;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.handler.codec.http.HttpResponseStatus;
public class StaticResourceServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(StaticResourceServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public StaticResourceServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
super(ctx, paths);
@@ -63,7 +64,7 @@
try {
IOUtils.copy(is, out);
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Failure copying response", e);
+ LOGGER.log(Level.WARN, "Failure copying response", e);
} finally {
if (out != null) {
IOUtils.closeQuietly(out);
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/utils/HttpUtil.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/utils/HttpUtil.java
index 99e334c..ffa4d4b 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/utils/HttpUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/utils/HttpUtil.java
@@ -28,14 +28,15 @@
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import java.util.regex.Pattern;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.BaseRequest;
import org.apache.hyracks.http.server.FormUrlEncodedRequest;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.handler.codec.http.FullHttpRequest;
@@ -45,7 +46,7 @@
public class HttpUtil {
- private static final Logger LOGGER = Logger.getLogger(HttpUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final Pattern PARENT_DIR = Pattern.compile("/[^./]+/\\.\\./");
private static long maxMemUsage = 0L;
@@ -207,7 +208,7 @@
report.append('\n');
report.append('\n');
} catch (Throwable th) { // NOSONAR
- LOGGER.log(Level.WARNING, "Failed to access PlatformDependent.DIRECT_MEMORY_COUNTER", th);
+ LOGGER.log(Level.WARN, "Failed to access PlatformDependent.DIRECT_MEMORY_COUNTER", th);
return;
}
report.append("--------------- PooledByteBufAllocator.DEFAULT ----------------");
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/test/java/org/apache/hyracks/http/servlet/ChattyServlet.java b/hyracks-fullstack/hyracks/hyracks-http/src/test/java/org/apache/hyracks/http/servlet/ChattyServlet.java
index 5bd2e38..e6aedb9 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/test/java/org/apache/hyracks/http/servlet/ChattyServlet.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/test/java/org/apache/hyracks/http/servlet/ChattyServlet.java
@@ -19,18 +19,19 @@
package org.apache.hyracks.http.servlet;
import java.util.concurrent.ConcurrentMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import io.netty.handler.codec.http.HttpResponseStatus;
public class ChattyServlet extends AbstractServlet {
- private static final Logger LOGGER = Logger.getLogger(ChattyServlet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private byte[] bytes;
public ChattyServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
@@ -53,7 +54,7 @@
protected void get(IServletRequest request, IServletResponse response) throws Exception {
response.setStatus(HttpResponseStatus.OK);
HttpUtil.setContentType(response, HttpUtil.ContentType.TEXT_HTML, HttpUtil.Encoding.UTF8);
- LOGGER.log(Level.WARNING, "I am about to flood you... and a single buffer is " + bytes.length + " bytes");
+ LOGGER.log(Level.WARN, "I am about to flood you... and a single buffer is " + bytes.length + " bytes");
for (int i = 0; i < 100; i++) {
response.outputStream().write(bytes);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/pom.xml b/hyracks-fullstack/hyracks/hyracks-ipc/pom.xml
index b49e99e..5ef436f 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/pom.xml
@@ -49,5 +49,9 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
index 36cf2fd..7688974 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
@@ -39,13 +39,14 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.io.IOUtils;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class IPCConnectionManager {
- private static final Logger LOGGER = Logger.getLogger(IPCConnectionManager.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
// TODO(mblow): the next two could be config parameters
private static final int INITIAL_RETRY_DELAY_MILLIS = 100;
@@ -120,7 +121,7 @@
return handle;
}
if (maxRetries < 0 || retries++ < maxRetries) {
- LOGGER.warning("Connection to " + remoteAddress + " failed; retrying" + (maxRetries <= 0 ? ""
+ LOGGER.warn("Connection to " + remoteAddress + " failed; retrying" + (maxRetries <= 0 ? ""
: " (retry attempt " + retries + " of " + maxRetries + ") after " + delay + "ms"));
Thread.sleep(delay);
delay = Math.min(MAX_RETRY_DELAY_MILLIS, (int) (delay * 1.5));
@@ -136,8 +137,8 @@
}
synchronized void write(Message msg) {
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Enqueued message: " + msg);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Enqueued message: " + msg);
}
sendList.add(msg);
networkThread.selector.wakeup();
@@ -209,8 +210,8 @@
int failingLoops = 0;
while (!stopped) {
try {
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Starting Select");
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Starting Select");
}
int n = selector.select();
collectOutstandingWork();
@@ -238,9 +239,7 @@
int len = workingSendList.size();
for (int i = 0; i < len; ++i) {
Message msg = workingSendList.get(i);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Processing send of message: " + msg);
- }
+ LOGGER.debug(() -> "Processing send of message: " + msg);
IPCHandle handle = msg.getIPCHandle();
if (handle.getState() != HandleState.CLOSED) {
if (!handle.full()) {
@@ -340,7 +339,7 @@
failingLoops = 0;
} catch (Exception e) {
int sleepSecs = (int)Math.pow(2, Math.min(11, failingLoops++));
- LOGGER.log(Level.SEVERE, "Exception processing message; sleeping " + sleepSecs
+ LOGGER.log(Level.ERROR, "Exception processing message; sleeping " + sleepSecs
+ " seconds", e);
try {
Thread.sleep(TimeUnit.SECONDS.toMillis(sleepSecs));
@@ -364,10 +363,10 @@
try {
connectFinished = channel.finishConnect();
if (!connectFinished) {
- LOGGER.log(Level.WARNING, "Channel connect did not finish");
+ LOGGER.log(Level.WARN, "Channel connect did not finish");
}
} catch (IOException e) {
- LOGGER.log(Level.WARNING, "Exception finishing channel connect", e);
+ LOGGER.log(Level.WARN, "Exception finishing channel connect", e);
}
return connectFinished;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCSystem.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCSystem.java
index f7e0af1..8e38651 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCSystem.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCSystem.java
@@ -21,17 +21,18 @@
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.concurrent.atomic.AtomicLong;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.ipc.api.IIPCHandle;
import org.apache.hyracks.ipc.api.IIPCI;
import org.apache.hyracks.ipc.api.IPCPerformanceCounters;
import org.apache.hyracks.ipc.api.IPayloadSerializerDeserializer;
import org.apache.hyracks.ipc.exceptions.IPCException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class IPCSystem {
- private static final Logger LOGGER = Logger.getLogger(IPCSystem.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final IPCConnectionManager cMgr;
diff --git a/hyracks-fullstack/hyracks/hyracks-net/pom.xml b/hyracks-fullstack/hyracks/hyracks-net/pom.xml
index 8da177e..66edba6 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-net/pom.xml
@@ -51,5 +51,9 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/AbstractChannelWriteInterface.java b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/AbstractChannelWriteInterface.java
index e50ffd2..0b548f6 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/AbstractChannelWriteInterface.java
+++ b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/AbstractChannelWriteInterface.java
@@ -21,17 +21,17 @@
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Queue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IBufferAcceptor;
import org.apache.hyracks.api.comm.IChannelControlBlock;
import org.apache.hyracks.api.comm.IChannelWriteInterface;
import org.apache.hyracks.api.comm.ICloseableBufferAcceptor;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public abstract class AbstractChannelWriteInterface implements IChannelWriteInterface {
- private static final Logger LOGGER = Logger.getLogger(AbstractChannelWriteInterface.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected final IChannelControlBlock ccb;
protected final Queue<ByteBuffer> wiFullQueue;
protected boolean channelWritabilityState;
@@ -129,8 +129,8 @@
public void close() {
synchronized (ccb) {
if (eos) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Received duplicate close() on channel: " + ccb.getChannelId());
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Received duplicate close() on channel: " + ccb.getChannelId());
}
return;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/ChannelSet.java b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/ChannelSet.java
index 49bb292..bd42560 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/ChannelSet.java
+++ b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/ChannelSet.java
@@ -20,15 +20,15 @@
import java.util.Arrays;
import java.util.BitSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IChannelInterfaceFactory;
import org.apache.hyracks.api.comm.MuxDemuxCommand;
import org.apache.hyracks.api.exceptions.NetException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ChannelSet {
- private static final Logger LOGGER = Logger.getLogger(ChannelSet.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int INITIAL_SIZE = 16;
@@ -82,8 +82,8 @@
ChannelControlBlock ccb = ccbArray[i];
if (ccb != null) {
if (ccb.completelyClosed()) {
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Cleaning free channel: " + ccb);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Cleaning free channel: " + ccb);
}
freeChannel(ccb);
}
@@ -218,8 +218,8 @@
if (ccbArray[idx] != null) {
assert ccbArray[idx].completelyClosed() : ccbArray[idx].toString();
if (ccbArray[idx].completelyClosed()) {
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Cleaning free channel: " + ccbArray[idx]);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Cleaning free channel: " + ccbArray[idx]);
}
freeChannel(ccbArray[idx]);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/FullFrameChannelWriteInterface.java b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/FullFrameChannelWriteInterface.java
index 418ebd7..17b70a8 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/FullFrameChannelWriteInterface.java
+++ b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/FullFrameChannelWriteInterface.java
@@ -18,18 +18,17 @@
*/
package org.apache.hyracks.net.protocols.muxdemux;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.api.comm.IBufferFactory;
import org.apache.hyracks.api.comm.IChannelControlBlock;
import org.apache.hyracks.api.comm.IConnectionWriterState;
import org.apache.hyracks.api.comm.MuxDemuxCommand;
import org.apache.hyracks.api.exceptions.NetException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class FullFrameChannelWriteInterface extends AbstractChannelWriteInterface {
- private static final Logger LOGGER = Logger.getLogger(FullFrameChannelWriteInterface.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
FullFrameChannelWriteInterface(IChannelControlBlock ccb) {
super(ccb);
@@ -77,8 +76,8 @@
}
channelWritabilityState = true;
if (eos) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Received duplicate close() on channel: " + channelId);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Received duplicate close() on channel: " + channelId);
}
return;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
index 8c54c65..81636de 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
+++ b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
@@ -23,8 +23,6 @@
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;
import java.util.BitSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IChannelControlBlock;
import org.apache.hyracks.api.comm.IChannelInterfaceFactory;
@@ -33,6 +31,8 @@
import org.apache.hyracks.api.exceptions.NetException;
import org.apache.hyracks.net.protocols.tcp.ITCPConnectionEventListener;
import org.apache.hyracks.net.protocols.tcp.TCPConnection;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
/**
* A {@link MultiplexedConnection} can be used by clients to create multiple "channels"
@@ -41,7 +41,7 @@
* @author vinayakb
*/
public class MultiplexedConnection implements ITCPConnectionEventListener {
- private static final Logger LOGGER = Logger.getLogger(MultiplexedConnection.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int MAX_CHUNKS_READ_PER_CYCLE = 4;
@@ -366,8 +366,8 @@
}
readerState.readBuffer.flip();
readerState.command.read(readerState.readBuffer);
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Received command: " + readerState.command);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Received command: " + readerState.command);
}
ChannelControlBlock ccb = null;
switch (readerState.command.getCommandType()) {
@@ -409,8 +409,8 @@
muxDemux.getChannelOpenListener().channelOpened(ccb);
}
}
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("Applied command: " + readerState.command + " on " + ccb);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Applied command: " + readerState.command + " on " + ccb);
}
}
if (readerState.pendingReadSize > 0) {
diff --git a/hyracks-fullstack/hyracks/hyracks-server/pom.xml b/hyracks-fullstack/hyracks/hyracks-server/pom.xml
index 8ad94e3..42a29fe 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-server/pom.xml
@@ -176,5 +176,9 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksServerProcess.java b/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksServerProcess.java
index c387ceb..0c7be89 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksServerProcess.java
+++ b/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksServerProcess.java
@@ -25,11 +25,12 @@
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
abstract class HyracksServerProcess {
- private static final Logger LOGGER = Logger.getLogger(HyracksServerProcess.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected Process process;
protected File configFile = null;
@@ -39,14 +40,14 @@
public void start() throws IOException {
String[] cmd = buildCommand();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Starting command: " + Arrays.toString(cmd));
}
ProcessBuilder pb = new ProcessBuilder(cmd);
pb.redirectErrorStream(true);
if (logFile != null) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Logging to: " + logFile.getCanonicalPath());
}
logFile.getParentFile().mkdirs();
@@ -55,7 +56,7 @@
}
pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logFile));
} else {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Logfile not set, subprocess will output to stdout");
}
}
@@ -67,8 +68,8 @@
process.destroy();
try {
boolean success = process.waitFor(30, TimeUnit.SECONDS);
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Killing unresponsive NC Process");
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Killing unresponsive NC Process");
}
if (!success) {
process.destroyForcibly();
diff --git a/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java b/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
index c3bdb2a..d12c05b 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
+++ b/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
@@ -22,13 +22,11 @@
import java.io.IOException;
import java.net.InetAddress;
import java.util.Iterator;
-import java.util.logging.Logger;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import junit.framework.Assert;
-import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.HttpClient;
@@ -37,6 +35,8 @@
import org.apache.http.util.EntityUtils;
import org.apache.hyracks.server.process.HyracksVirtualCluster;
import org.apache.hyracks.util.file.FileUtil;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -47,7 +47,7 @@
private static final String LOG_DIR = FileUtil.joinPath(TARGET_DIR, "failsafe-reports");
private static final String RESOURCE_DIR = FileUtil.joinPath(TARGET_DIR, "test-classes", "NCServiceIT");
private static final String APP_HOME = FileUtil.joinPath(TARGET_DIR, "appassembler");
- private static final Logger LOGGER = Logger.getLogger(NCServiceIT.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static HyracksVirtualCluster cluster = null;
@@ -139,7 +139,7 @@
setUp();
} catch (Exception e) {
e.printStackTrace();
- LOGGER.severe("TEST CASE(S) FAILED");
+ LOGGER.error("TEST CASE(S) FAILED");
} finally {
tearDown();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml
index 2b3f3cb..1d52034 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml
@@ -90,5 +90,9 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/build/IndexBuilder.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/build/IndexBuilder.java
index 9f648c5..f62860a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/build/IndexBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/build/IndexBuilder.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.storage.am.common.build;
import java.io.IOException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -36,9 +34,12 @@
import org.apache.hyracks.storage.common.IStorageManager;
import org.apache.hyracks.storage.common.LocalResource;
import org.apache.hyracks.storage.common.file.IResourceIdFactory;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class IndexBuilder implements IIndexBuilder {
- private static final Logger LOGGER = Logger.getLogger(IndexBuilder.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected final INCServiceContext ctx;
protected final IStorageManager storageManager;
@@ -85,7 +86,7 @@
//The reason for this is to handle many cases such as:
//1. Crash while delete index is running (we don't do global cleanup on restart)
//2. Node leaves and then join with old data
- LOGGER.log(Level.WARNING,
+ LOGGER.log(Level.WARN,
"Removing existing index on index create for the index: " + resourceRef.getRelativePath());
lcManager.unregister(resourceRef.getRelativePath());
index.destroy();
@@ -95,7 +96,7 @@
// This is another big problem that we need to disallow soon
// We can only disallow this if we have a global cleanup after crash
// on reboot
- LOGGER.log(Level.WARNING,
+ LOGGER.log(Level.WARN,
"Deleting " + resourceRef.getRelativePath()
+ " on index create. The index is not registered"
+ " but the file exists in the filesystem");
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
index abfe0bb..520dbc0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
@@ -19,9 +19,6 @@
package org.apache.hyracks.storage.am.common.dataflow;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -33,10 +30,13 @@
import org.apache.hyracks.storage.common.IResourceLifecycleManager;
import org.apache.hyracks.storage.common.IStorageManager;
import org.apache.hyracks.storage.common.LocalResource;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class IndexDataflowHelper implements IIndexDataflowHelper {
- private static final Logger LOGGER = Logger.getLogger(IndexDataflowHelper.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final INCServiceContext ctx;
private final IResourceLifecycleManager<IIndex> lcManager;
private final ILocalResourceRepository localResourceRepository;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
index ea1635a..c59c1a8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
@@ -27,8 +27,6 @@
import java.util.Set;
import java.util.concurrent.TimeUnit;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -36,10 +34,12 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
import org.apache.hyracks.storage.am.common.api.IIndexDataflowHelper;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class IndexDropOperatorNodePushable extends AbstractOperatorNodePushable {
- private static final Logger LOGGER = Logger.getLogger(IndexDropOperatorNodePushable.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final long DROP_ATTEMPT_WAIT_TIME_MILLIS = TimeUnit.SECONDS.toMillis(1);
private final IIndexDataflowHelper indexHelper;
private final Set<DropOption> options;
@@ -83,11 +83,11 @@
return;
} catch (HyracksDataException e) {
if (isIgnorable(e)) {
- LOGGER.log(Level.INFO, e, () -> "Ignoring exception on drop");
+ LOGGER.info("Ignoring exception on drop", e);
return;
}
if (canRetry(e)) {
- LOGGER.log(Level.INFO, e, () -> "Retrying drop on exception");
+ LOGGER.info("Retrying drop on exception", e);
continue;
}
throw e;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
index 132149b..1e5f69b 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
@@ -21,8 +21,6 @@
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.comm.VSizeFrame;
import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -50,10 +48,13 @@
import org.apache.hyracks.storage.common.IIndexCursor;
import org.apache.hyracks.storage.common.ISearchOperationCallback;
import org.apache.hyracks.storage.common.ISearchPredicate;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public abstract class IndexSearchOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
- static final Logger LOGGER = Logger.getLogger(IndexSearchOperatorNodePushable.class.getName());
+ static final Logger LOGGER = LogManager.getLogger();
protected final IHyracksTaskContext ctx;
protected final IIndexDataflowHelper indexHelper;
protected FrameTupleAccessor accessor;
@@ -299,7 +300,7 @@
try {
nonMatchWriter.writeMissing(out);
} catch (Exception e) {
- LOGGER.log(Level.WARNING, e.getMessage(), e);
+ LOGGER.log(Level.WARN, e.getMessage(), e);
}
nullTuple.addFieldEndOffset();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml
index 84a2e95..7ef4a98 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml
@@ -86,5 +86,9 @@
<artifactId>hyracks-util</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMDiskComponent.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMDiskComponent.java
index bb27236..26c7b0d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMDiskComponent.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMDiskComponent.java
@@ -18,9 +18,6 @@
*/
package org.apache.hyracks.storage.am.lsm.common.impls;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.IMetadataPageManager;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilter;
@@ -30,10 +27,13 @@
import org.apache.hyracks.storage.am.lsm.common.util.ComponentUtils;
import org.apache.hyracks.storage.am.lsm.common.util.LSMComponentIdUtils;
import org.apache.hyracks.storage.common.MultiComparator;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public abstract class AbstractLSMDiskComponent extends AbstractLSMComponent implements ILSMDiskComponent {
- private static final Logger LOGGER = Logger.getLogger(AbstractLSMDiskComponent.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final DiskComponentMetadata metadata;
@@ -129,7 +129,7 @@
// However, we cannot throw an exception here to be compatible with legacy datasets.
// In this case, the disk component would always get a garbage Id [-1, -1], which makes the
// component Id-based optimization useless but still correct.
- LOGGER.warning("Component Id not found from disk component metadata");
+ LOGGER.warn("Component Id not found from disk component metadata");
}
return componentId;
}
@@ -144,7 +144,7 @@
@Override
public void markAsValid(boolean persist) throws HyracksDataException {
ComponentUtils.markAsValid(getMetadataHolder(), persist);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, "Marked as valid component with id: " + getId());
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
index dc808ad..6115ba6 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
@@ -27,8 +27,6 @@
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.exceptions.ErrorCode;
@@ -71,9 +69,12 @@
import org.apache.hyracks.storage.common.ISearchOperationCallback;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
import org.apache.hyracks.util.trace.ITracer;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public abstract class AbstractLSMIndex implements ILSMIndex {
- private static final Logger LOGGER = Logger.getLogger(AbstractLSMIndex.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected final ILSMHarness lsmHarness;
protected final IIOManager ioManager;
protected final ILSMIOOperationScheduler ioScheduler;
@@ -694,7 +695,7 @@
if (opCtx.getOperation() == IndexOperation.DELETE_MEMORY_COMPONENT) {
return EmptyComponent.INSTANCE;
} else {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
FlushOperation flushOp = (FlushOperation) operation;
LOGGER.log(Level.INFO, "Flushing component with id: " + flushOp.getFlushingComponent().getId());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMMemoryComponent.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMMemoryComponent.java
index 15cf8e5..c0bef7d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMMemoryComponent.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMMemoryComponent.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.storage.am.lsm.common.impls;
import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilter;
@@ -31,10 +29,13 @@
import org.apache.hyracks.storage.am.lsm.common.api.LSMOperationType;
import org.apache.hyracks.storage.am.lsm.common.util.LSMComponentIdUtils;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public abstract class AbstractLSMMemoryComponent extends AbstractLSMComponent implements ILSMMemoryComponent {
- private static final Logger LOGGER = Logger.getLogger(AbstractLSMMemoryComponent.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final IVirtualBufferCache vbc;
private final AtomicBoolean isModified;
private int writerCount;
@@ -280,7 +281,7 @@
throw new IllegalStateException(
this + " receives illegal id. Old id " + this.componentId + ", new id " + componentId);
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, "Component Id was reset from " + this.componentId + " to " + componentId);
}
this.componentId = componentId;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ComponentReplacementContext.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ComponentReplacementContext.java
index 98c1560..ee7afa0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ComponentReplacementContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ComponentReplacementContext.java
@@ -21,8 +21,6 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
@@ -37,9 +35,12 @@
import org.apache.hyracks.storage.common.ISearchOperationCallback;
import org.apache.hyracks.storage.common.ISearchPredicate;
import org.apache.hyracks.storage.common.MultiComparator;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ComponentReplacementContext implements ILSMIndexOperationContext {
- private static final Logger LOGGER = Logger.getLogger(ComponentReplacementContext.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final List<ILSMComponent> components;
private final List<ILSMComponent> diskComponents;
private final List<ILSMComponentId> replacedComponentIds;
@@ -166,7 +167,7 @@
}
if (!found) {
// component has been merged?
- LOGGER.log(Level.WARNING, "Memory Component with id = " + replacedComponentIds.get(i)
+ LOGGER.log(Level.WARN, "Memory Component with id = " + replacedComponentIds.get(i)
+ " was flushed and merged before search cursor replaces it");
return false;
}
@@ -198,7 +199,7 @@
ctx.getComponentHolder().add(swapIndexes[i], diskComponents.get(i));
}
} catch (Exception e) {
- LOGGER.log(Level.WARNING, "Failure replacing memory components with disk components", e);
+ LOGGER.log(Level.WARN, "Failure replacing memory components with disk components", e);
throw e;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
index 20f9f6a..d9d3a07 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.storage.am.lsm.common.impls;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -41,9 +39,11 @@
import org.apache.hyracks.storage.common.IIndexCursor;
import org.apache.hyracks.storage.common.ISearchPredicate;
import org.apache.hyracks.util.trace.ITracer;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ExternalIndexHarness extends LSMHarness {
- private static final Logger LOGGER = Logger.getLogger(ExternalIndexHarness.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public ExternalIndexHarness(ILSMIndex lsmIndex, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
boolean replicationEnabled) {
@@ -230,7 +230,7 @@
@Override
public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Started a merge operation for index: " + lsmIndex + " ...");
}
@@ -243,7 +243,7 @@
exitComponents(ctx, LSMOperationType.MERGE, newComponent, false);
operation.getCallback().afterFinalize(LSMIOOperationType.MERGE, newComponent);
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Finished the merge operation for index: " + lsmIndex);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
index 5368591..15ed0ff 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
@@ -24,8 +24,6 @@
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Predicate;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -58,9 +56,12 @@
import org.apache.hyracks.storage.common.ISearchPredicate;
import org.apache.hyracks.util.trace.ITracer;
import org.apache.hyracks.util.trace.ITracer.Scope;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class LSMHarness implements ILSMHarness {
- private static final Logger LOGGER = Logger.getLogger(LSMHarness.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected final ILSMIndex lsmIndex;
protected final ComponentReplacementContext componentReplacementCtx;
@@ -195,8 +196,8 @@
}
entranceSuccessful = numEntered == components.size();
} catch (Throwable e) { // NOSONAR: Log and re-throw
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, opType.name() + " failed to enter components on " + lsmIndex, e);
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.log(Level.ERROR, opType.name() + " failed to enter components on " + lsmIndex, e);
}
throw e;
} finally {
@@ -270,8 +271,8 @@
ctx.setAccessingComponents(false);
exitOperation(ctx, opType, newComponent, failedOperation);
} catch (Throwable e) { // NOSONAR: Log and re-throw
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, e.getMessage(), e);
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.log(Level.ERROR, e.getMessage(), e);
}
throw e;
} finally {
@@ -324,8 +325,8 @@
c.deactivateAndDestroy();
}
} catch (Throwable e) { // NOSONAR Log and re-throw
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Failure scheduling replication or destroying merged component", e);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "Failure scheduling replication or destroying merged component", e);
}
throw e; // NOSONAR: The last call in the finally clause
}
@@ -557,7 +558,7 @@
@Override
public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Started a flush operation for index: " + lsmIndex + " ...");
}
try {
@@ -569,8 +570,8 @@
newComponent.markAsValid(lsmIndex.isDurable());
} catch (Throwable e) { // NOSONAR Log and re-throw
failedOperation = true;
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, "Flush failed on " + lsmIndex, e);
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.log(Level.ERROR, "Flush failed on " + lsmIndex, e);
}
throw e;
} finally {
@@ -586,7 +587,7 @@
opTracker.completeOperation(lsmIndex, LSMOperationType.FLUSH, ctx.getSearchOperationCallback(),
ctx.getModificationCallback());
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Finished the flush operation for index: " + lsmIndex);
}
}
@@ -618,7 +619,7 @@
@Override
public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Started a merge operation for index: " + lsmIndex + " ...");
}
try {
@@ -631,8 +632,8 @@
newComponent.markAsValid(lsmIndex.isDurable());
} catch (Throwable e) { // NOSONAR: Log and re-throw
failedOperation = true;
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, "Failed merge operation on " + lsmIndex, e);
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.log(Level.ERROR, "Failed merge operation on " + lsmIndex, e);
}
throw e;
} finally {
@@ -658,7 +659,7 @@
opTracker.completeOperation(lsmIndex, LSMOperationType.MERGE, ctx.getSearchOperationCallback(),
ctx.getModificationCallback());
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Finished the merge operation for index: " + lsmIndex);
}
}
@@ -751,8 +752,8 @@
processor.finish();
}
} catch (HyracksDataException e) {
- if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, "Failed to process frame", e);
+ if (LOGGER.isErrorEnabled()) {
+ LOGGER.log(Level.ERROR, "Failed to process frame", e);
}
throw e;
} finally {
@@ -806,8 +807,8 @@
opTracker.wait();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Ignoring interrupt while waiting for lagging merge on " + lsmIndex,
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "Ignoring interrupt while waiting for lagging merge on " + lsmIndex,
e);
}
}
@@ -891,7 +892,7 @@
try {
opTracker.wait(); // NOSONAR: OpTracker is always synchronized here
} catch (InterruptedException e) {
- LOGGER.log(Level.WARNING, "Interrupted while attempting component level delete", e);
+ LOGGER.log(Level.WARN, "Interrupted while attempting component level delete", e);
Thread.currentThread().interrupt();
throw HyracksDataException.create(e);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/MemoryComponentMetadata.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/MemoryComponentMetadata.java
index 1b827b7..3179790 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/MemoryComponentMetadata.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/MemoryComponentMetadata.java
@@ -20,8 +20,6 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -31,9 +29,12 @@
import org.apache.hyracks.storage.am.common.api.IMetadataPageManager;
import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrame;
import org.apache.hyracks.storage.am.lsm.common.api.IComponentMetadata;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class MemoryComponentMetadata implements IComponentMetadata {
- private static final Logger LOGGER = Logger.getLogger(MemoryComponentMetadata.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final byte[] empty = new byte[0];
private final List<org.apache.commons.lang3.tuple.Pair<IValueReference, ArrayBackedValueStorage>> store =
new ArrayList<>();
@@ -77,7 +78,7 @@
LOGGER.log(Level.INFO, "Copying Metadata into a different component");
ITreeIndexMetadataFrame frame = mdpManager.createMetadataFrame();
for (Pair<IValueReference, ArrayBackedValueStorage> pair : store) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, "Copying " + pair.getKey() + " : " + pair.getValue().getLength() + " bytes");
}
mdpManager.put(frame, pair.getKey(), pair.getValue());
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/SynchronousScheduler.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/SynchronousScheduler.java
index da3e986..506dcea 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/SynchronousScheduler.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/SynchronousScheduler.java
@@ -18,15 +18,15 @@
*/
package org.apache.hyracks.storage.am.lsm.common.impls;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class SynchronousScheduler implements ILSMIOOperationScheduler {
- private static final Logger LOGGER = Logger.getLogger(SynchronousScheduler.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final SynchronousScheduler INSTANCE = new SynchronousScheduler();
private SynchronousScheduler() {
@@ -37,7 +37,7 @@
try {
operation.call();
} catch (Exception e) {
- LOGGER.log(Level.SEVERE, "IO Operation failed", e);
+ LOGGER.log(Level.ERROR, "IO Operation failed", e);
throw HyracksDataException.create(e);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/TracedIOOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/TracedIOOperation.java
index 4cfc3b6..5fa6b4f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/TracedIOOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/TracedIOOperation.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.storage.am.lsm.common.impls;
-import java.util.logging.Logger;
-
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IODeviceHandle;
@@ -29,10 +27,12 @@
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
import org.apache.hyracks.util.trace.ITracer;
import org.apache.hyracks.util.trace.ITracer.Scope;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
class TracedIOOperation implements ILSMIOOperation {
- static final Logger LOGGER = Logger.getLogger(TracedIOOperation.class.getName());
+ static final Logger LOGGER = LogManager.getLogger();
protected final ILSMIOOperation ioOp;
private final LSMIOOperationType ioOpType;
@@ -128,7 +128,7 @@
if (myIoOp instanceof Comparable && other instanceof ComparableTracedIOOperation) {
return ((Comparable) myIoOp).compareTo(((ComparableTracedIOOperation) other).getIoOp());
}
- LOGGER.warning("Comparing ioOps of type " + myIoOp.getClass().getSimpleName() + " and "
+ LOGGER.warn("Comparing ioOps of type " + myIoOp.getClass().getSimpleName() + " and "
+ other.getClass().getSimpleName() + " in " + getClass().getSimpleName());
return Integer.signum(hashCode() - other.hashCode());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/VirtualBufferCache.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/VirtualBufferCache.java
index 3a22793..d192351 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/VirtualBufferCache.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/VirtualBufferCache.java
@@ -25,8 +25,6 @@
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -42,9 +40,12 @@
import org.apache.hyracks.storage.common.file.FileMapManager;
import org.apache.hyracks.storage.common.file.IFileMapManager;
import org.apache.hyracks.util.JSONUtil;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class VirtualBufferCache implements IVirtualBufferCache {
- private static final Logger LOGGER = Logger.getLogger(VirtualBufferCache.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private final ICacheMemoryAllocator allocator;
private final IFileMapManager fileMapManager;
@@ -125,7 +126,7 @@
}
private void logStats() {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, "Free (allocated) pages = " + freePages.size() + ". Budget = " + pageBudget
+ ". Large pages = " + largePages.get() + ". Overall usage = " + used.get());
}
@@ -187,7 +188,7 @@
bucket.bucketLock.unlock();
}
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.log(Level.INFO, "Reclaimed pages = " + reclaimedPages);
}
logStats();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/ComponentUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/ComponentUtils.java
index 0097a37..94a3702 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/ComponentUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/ComponentUtils.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.storage.am.lsm.common.util;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IPointable;
@@ -34,10 +32,13 @@
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMemoryComponent;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ComponentUtils {
- private static final Logger LOGGER = Logger.getLogger(ComponentUtils.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
public static final MutableArrayValueReference MARKER_LSN_KEY = new MutableArrayValueReference("Marker".getBytes());
public static final long NOT_FOUND = -1L;
@@ -76,40 +77,40 @@
* @throws HyracksDataException
*/
public static void get(ILSMIndex index, IValueReference key, IPointable pointable) throws HyracksDataException {
- boolean loggable = LOGGER.isLoggable(Level.FINE);
+ boolean loggable = LOGGER.isDebugEnabled();
if (loggable) {
- LOGGER.log(Level.FINE, "Getting " + key + " from index " + index);
+ LOGGER.log(Level.DEBUG, "Getting " + key + " from index " + index);
}
// Lock the opTracker to ensure index components don't change
synchronized (index.getOperationTracker()) {
index.getCurrentMemoryComponent().getMetadata().get(key, pointable);
if (pointable.getLength() == 0) {
if (loggable) {
- LOGGER.log(Level.FINE, key + " was not found in mutable memory component of " + index);
+ LOGGER.log(Level.DEBUG, key + " was not found in mutable memory component of " + index);
}
// was not found in the in current mutable component, search in the other in memory components
fromImmutableMemoryComponents(index, key, pointable);
if (pointable.getLength() == 0) {
if (loggable) {
- LOGGER.log(Level.FINE, key + " was not found in all immmutable memory components of " + index);
+ LOGGER.log(Level.DEBUG, key + " was not found in all immutable memory components of " + index);
}
// was not found in the in all in memory components, search in the disk components
fromDiskComponents(index, key, pointable);
if (loggable) {
if (pointable.getLength() == 0) {
- LOGGER.log(Level.FINE, key + " was not found in all disk components of " + index);
+ LOGGER.log(Level.DEBUG, key + " was not found in all disk components of " + index);
} else {
- LOGGER.log(Level.FINE, key + " was found in disk components of " + index);
+ LOGGER.log(Level.DEBUG, key + " was found in disk components of " + index);
}
}
} else {
if (loggable) {
- LOGGER.log(Level.FINE, key + " was found in the immutable memory components of " + index);
+ LOGGER.log(Level.DEBUG, key + " was found in the immutable memory components of " + index);
}
}
} else {
if (loggable) {
- LOGGER.log(Level.FINE, key + " was found in mutable memory component of " + index);
+ LOGGER.log(Level.DEBUG, key + " was found in mutable memory component of " + index);
}
}
}
@@ -135,13 +136,13 @@
private static void fromDiskComponents(ILSMIndex index, IValueReference key, IPointable pointable)
throws HyracksDataException {
- boolean loggable = LOGGER.isLoggable(Level.FINE);
+ boolean loggable = LOGGER.isDebugEnabled();
if (loggable) {
- LOGGER.log(Level.FINE, "Getting " + key + " from disk components of " + index);
+ LOGGER.log(Level.DEBUG, "Getting " + key + " from disk components of " + index);
}
for (ILSMDiskComponent c : index.getDiskComponents()) {
if (loggable) {
- LOGGER.log(Level.FINE, "Getting " + key + " from disk components " + c);
+ LOGGER.log(Level.DEBUG, "Getting " + key + " from disk components " + c);
}
c.getMetadata().get(key, pointable);
if (pointable.getLength() != 0) {
@@ -152,19 +153,20 @@
}
private static void fromImmutableMemoryComponents(ILSMIndex index, IValueReference key, IPointable pointable) {
- boolean loggable = LOGGER.isLoggable(Level.FINE);
+ boolean loggable = LOGGER.isDebugEnabled();
if (loggable) {
- LOGGER.log(Level.FINE, "Getting " + key + " from immutable memory components of " + index);
+ LOGGER.log(Level.DEBUG, "Getting " + key + " from immutable memory components of " + index);
}
List<ILSMMemoryComponent> memComponents = index.getMemoryComponents();
int numOtherMemComponents = memComponents.size() - 1;
int next = index.getCurrentMemoryComponentIndex();
if (loggable) {
- LOGGER.log(Level.FINE, index + " has " + numOtherMemComponents + " immutable memory components");
+ LOGGER.log(Level.DEBUG, index + " has " + numOtherMemComponents + " immutable memory components");
}
for (int i = 0; i < numOtherMemComponents; i++) {
if (loggable) {
- LOGGER.log(Level.FINE, "trying to get " + key + " from immutable memory components number: " + (i + 1));
+ LOGGER.log(Level.DEBUG,
+ "trying to get " + key + " from immutable memory components number: " + (i + 1));
}
next = next - 1;
if (next < 0) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/IOOperationUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/IOOperationUtils.java
index 0aeb0b9..9bd873d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/IOOperationUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/util/IOOperationUtils.java
@@ -18,14 +18,14 @@
*/
package org.apache.hyracks.storage.am.lsm.common.util;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.lsm.common.impls.BlockingIOOperationCallbackWrapper;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class IOOperationUtils {
- private static final Logger LOGGER = Logger.getLogger(IOOperationUtils.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private IOOperationUtils() {
}
@@ -35,7 +35,7 @@
try {
ioCallback.waitForIO();
} catch (InterruptedException e) {
- LOGGER.log(Level.WARNING, "Operation has been interrupted. returning");
+ LOGGER.log(Level.WARN, "Operation has been interrupted. returning");
Thread.currentThread().interrupt();
throw HyracksDataException.create(e);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
index acd2e49..9998e97 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
@@ -88,5 +88,9 @@
<artifactId>hyracks-storage-am-bloomfilter</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
index 030e4fd..01f0f1a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
@@ -20,8 +20,6 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
@@ -77,9 +75,12 @@
import org.apache.hyracks.storage.common.MultiComparator;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
import org.apache.hyracks.util.trace.ITracer;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class LSMInvertedIndex extends AbstractLSMIndex implements IInvertedIndex {
- private static final Logger LOGGER = Logger.getLogger(LSMInvertedIndex.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected final IBinaryTokenizerFactory tokenizerFactory;
@@ -178,7 +179,7 @@
} catch (HyracksDataException e) {
if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
// Key has already been deleted.
- LOGGER.log(Level.WARNING, "Failure during index delete operation", e);
+ LOGGER.log(Level.WARN, "Failure during index delete operation", e);
throw e;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
index a699075..2a40e4c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
@@ -51,5 +51,9 @@
<artifactId>hyracks-dataflow-common</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
index 7167565..6212896 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
@@ -36,8 +36,6 @@
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -49,10 +47,13 @@
import org.apache.hyracks.api.util.IoUtil;
import org.apache.hyracks.storage.common.file.BufferedFileHandle;
import org.apache.hyracks.storage.common.file.IFileMapManager;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class BufferCache implements IBufferCacheInternal, ILifeCycleComponent {
- private static final Logger LOGGER = Logger.getLogger(BufferCache.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int MAP_FACTOR = 3;
private static final int MIN_CLEANED_COUNT_DIFF = 3;
@@ -76,7 +77,7 @@
private final Queue<BufferCacheHeaderHelper> headerPageCache = new ConcurrentLinkedQueue<>();
//DEBUG
- private Level fileOpsLevel = Level.FINE;
+ private Level fileOpsLevel = Level.DEBUG;
private ArrayList<CachedPage> confiscatedPages;
private Lock confiscateLock;
private HashMap<CachedPage, StackTraceElement[]> confiscatedPagesOwner;
@@ -517,7 +518,7 @@
*/
try {
Thread.sleep(PERIOD_BETWEEN_READ_ATTEMPTS);
- LOGGER.log(Level.WARNING, String.format("Failed to read page. Retrying attempt (%d/%d)", i + 1,
+ LOGGER.log(Level.WARN, String.format("Failed to read page. Retrying attempt (%d/%d)", i + 1,
MAX_PAGE_READ_ATTEMPTS), readException);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
@@ -705,7 +706,7 @@
try {
write(cPage);
} catch (HyracksDataException e) {
- LOGGER.log(Level.WARNING, "Unable to write dirty page", e);
+ LOGGER.log(Level.WARN, "Unable to write dirty page", e);
cleaned = false;
}
if (cleaned) {
@@ -782,8 +783,8 @@
ioManager.close(value.getFileHandle());
}
} catch (HyracksDataException e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Error flushing file id: " + key, e);
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.log(Level.WARN, "Error flushing file id: " + key, e);
}
}
});
@@ -793,7 +794,7 @@
@Override
public int createFile(FileReference fileRef) throws HyracksDataException {
- if (LOGGER.isLoggable(fileOpsLevel)) {
+ if (LOGGER.isEnabled(fileOpsLevel)) {
LOGGER.log(fileOpsLevel, "Creating file: " + fileRef + " in cache: " + this);
}
IoUtil.create(fileRef);
@@ -814,7 +815,7 @@
@Override
public int openFile(FileReference fileRef) throws HyracksDataException {
- if (LOGGER.isLoggable(fileOpsLevel)) {
+ if (LOGGER.isEnabled(fileOpsLevel)) {
LOGGER.log(fileOpsLevel, "Opening file: " + fileRef + " in cache: " + this);
}
int fileId;
@@ -831,7 +832,7 @@
@Override
public void openFile(int fileId) throws HyracksDataException {
- if (LOGGER.isLoggable(fileOpsLevel)) {
+ if (LOGGER.isEnabled(fileOpsLevel)) {
LOGGER.log(fileOpsLevel, "Opening file: " + fileId + " in cache: " + this);
}
synchronized (fileInfoMap) {
@@ -929,11 +930,11 @@
@Override
public void closeFile(int fileId) throws HyracksDataException {
- if (LOGGER.isLoggable(fileOpsLevel)) {
+ if (LOGGER.isEnabled(fileOpsLevel)) {
LOGGER.log(fileOpsLevel, "Closing file: " + fileId + " in cache: " + this);
}
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine(dumpState());
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug(dumpState());
}
synchronized (fileInfoMap) {
@@ -945,7 +946,7 @@
throw new HyracksDataException("Closed fileId: " + fileId + " more times than it was opened.");
}
}
- if (LOGGER.isLoggable(fileOpsLevel)) {
+ if (LOGGER.isEnabled(fileOpsLevel)) {
LOGGER.log(fileOpsLevel, "Closed file: " + fileId + " in cache: " + this);
}
}
@@ -980,7 +981,7 @@
@Override
public void deleteFile(int fileId) throws HyracksDataException {
- if (LOGGER.isLoggable(fileOpsLevel)) {
+ if (LOGGER.isEnabled(fileOpsLevel)) {
LOGGER.log(fileOpsLevel, "Deleting file: " + fileId + " in cache: " + this);
}
synchronized (fileInfoMap) {
@@ -1306,8 +1307,8 @@
}
}
} finally {
- if (cycleCount > PIN_ATTEMPT_CYCLES_WARNING_THRESHOLD && LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Took " + cycleCount + " cycles to find free page in buffer cache. (buffer cache "
+ if (cycleCount > PIN_ATTEMPT_CYCLES_WARNING_THRESHOLD && LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Took " + cycleCount + " cycles to find free page in buffer cache. (buffer cache "
+ "undersized?)" + (DEBUG
? " ; " + (masterPinCount.get() - startingPinCount)
+ " successful pins since start of cycle"
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
index 872ac35..a6a3bc8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
@@ -22,13 +22,13 @@
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class ClockPageReplacementStrategy implements IPageReplacementStrategy {
- private static final Logger LOGGER = Logger.getLogger(ClockPageReplacementStrategy.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final int MAX_UNSUCCESSFUL_CYCLE_COUNT = 3;
private IBufferCacheInternal bufferCache;
@@ -130,8 +130,8 @@
}
if (looped && clockPtr >= startClockPtr) {
cycleCount++;
- if (LOGGER.isLoggable(Level.FINE)) {
- LOGGER.fine("completed " + cycleCount + "/" + MAX_UNSUCCESSFUL_CYCLE_COUNT
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("completed " + cycleCount + "/" + MAX_UNSUCCESSFUL_CYCLE_COUNT
+ " clock cycle(s) without finding victim");
}
if (cycleCount >= MAX_UNSUCCESSFUL_CYCLE_COUNT) {
@@ -219,8 +219,8 @@
}
} else {
// we don't have the budget to resize- proceed anyway, but log
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Exceeding buffer cache budget of " + maxAllowedNumPages + " by "
+ if (LOGGER.isWarnEnabled()) {
+ LOGGER.warn("Exceeding buffer cache budget of " + maxAllowedNumPages + " by "
+ (numPages.get() + delta - maxAllowedNumPages)
+ " pages in order to satisfy large page read");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml b/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
index 104e943..6559308 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
@@ -104,5 +104,9 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
index fbd3950..2a7b978 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
@@ -22,8 +22,6 @@
import static org.junit.Assert.fail;
import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -51,11 +49,13 @@
import org.apache.hyracks.storage.common.IIndexBulkLoader;
import org.apache.hyracks.storage.common.IIndexCursor;
import org.apache.hyracks.storage.common.MultiComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.Test;
@SuppressWarnings("rawtypes")
public abstract class OrderedIndexExamplesTest {
- protected static final Logger LOGGER = Logger.getLogger(OrderedIndexExamplesTest.class.getName());
+ protected static final Logger LOGGER = LogManager.getLogger();
protected final Random rnd = new Random(50);
protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
@@ -69,7 +69,7 @@
*/
@Test
public void fixedLengthKeyValueExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Fixed-Length Key,Value Example.");
}
@@ -96,7 +96,7 @@
treeIndex.activate();
long start = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Inserting into tree...");
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -109,7 +109,7 @@
int f0 = rnd.nextInt() % numInserts;
int f1 = 5;
TupleUtils.createIntegerTuple(tb, tuple, f0, f1);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if (i % 1000 == 0) {
LOGGER.info("Inserting " + i + " : " + f0 + " " + f1);
}
@@ -123,7 +123,7 @@
}
}
long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
}
@@ -157,7 +157,7 @@
*/
@Test
public void pageSplitTestExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("BTree page split test.");
}
@@ -232,7 +232,7 @@
*/
@Test
public void twoFixedLengthKeysOneFixedLengthValueExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Composite Key Test");
}
@@ -262,7 +262,7 @@
treeIndex.activate();
long start = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Inserting into tree...");
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -276,7 +276,7 @@
int f1 = rnd.nextInt() % 1000;
int f2 = 5;
TupleUtils.createIntegerTuple(tb, tuple, f0, f1, f2);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if (i % 1000 == 0) {
LOGGER.info("Inserting " + i + " : " + f0 + " " + f1 + " " + f2);
}
@@ -290,7 +290,7 @@
}
}
long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
}
@@ -322,7 +322,7 @@
*/
@Test
public void varLenKeyValueExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Variable-Length Key,Value Example");
}
@@ -349,7 +349,7 @@
treeIndex.activate();
long start = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Inserting into tree...");
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -364,7 +364,7 @@
String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if (i % 1000 == 0) {
LOGGER.info("Inserting[" + i + "] " + f0 + " " + f1);
}
@@ -378,7 +378,7 @@
}
}
long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
}
@@ -410,7 +410,7 @@
*/
@Test
public void deleteExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Deletion Example");
}
@@ -444,7 +444,7 @@
// Max string length to be generated.
int runs = 3;
for (int run = 0; run < runs; run++) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Deletion example run: " + (run + 1) + "/" + runs);
LOGGER.info("Inserting into tree...");
}
@@ -460,7 +460,7 @@
TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
f0s[i] = f0;
f1s[i] = f1;
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if (i % 1000 == 0) {
LOGGER.info("Inserting " + i);
}
@@ -476,13 +476,13 @@
insDoneCmp[i] = insDone;
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Deleting from tree...");
}
int delDone = 0;
for (int i = 0; i < ins; i++) {
TupleUtils.createTuple(tb, tuple, fieldSerdes, f0s[i], f1s[i]);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if (i % 1000 == 0) {
LOGGER.info("Deleting " + i);
}
@@ -496,7 +496,7 @@
}
}
if (insDoneCmp[i] != delDone) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION EXAMPLE.");
LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
}
@@ -504,7 +504,7 @@
}
}
if (insDone != delDone) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
}
break;
@@ -523,7 +523,7 @@
*/
@Test
public void updateExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Update example");
}
@@ -549,7 +549,7 @@
treeIndex.create();
treeIndex.activate();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Inserting into tree...");
}
IndexAccessParameters actx =
@@ -565,7 +565,7 @@
String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
keys[i] = f0;
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if (i % 1000 == 0) {
LOGGER.info("Inserting " + i);
}
@@ -583,7 +583,7 @@
int runs = 3;
for (int run = 0; run < runs; run++) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Update test run: " + (run + 1) + "/" + runs);
LOGGER.info("Updating BTree");
}
@@ -591,7 +591,7 @@
// Generate a new random value for f1.
String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
TupleUtils.createTuple(tb, tuple, fieldSerdes, keys[i], f1);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if (i % 1000 == 0) {
LOGGER.info("Updating " + i);
}
@@ -612,7 +612,7 @@
*/
@Test
public void bulkLoadExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Bulk load example");
}
// Declare fields.
@@ -642,7 +642,7 @@
// Load sorted records.
int ins = 100000;
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Bulk loading " + ins + " tuples");
}
long start = System.currentTimeMillis();
@@ -655,7 +655,7 @@
}
bulkLoader.end();
long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(ins + " tuples loaded in " + (end - start) + "ms");
}
@@ -688,7 +688,7 @@
*/
@Test
public void bulkOrderVerificationExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Bulk load order verification example");
}
// Declare fields.
@@ -757,7 +757,7 @@
}
protected void orderedScan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ordered Scan:");
}
IIndexCursor scanCursor = indexAccessor.createSearchCursor(false);
@@ -768,7 +768,7 @@
scanCursor.next();
ITupleReference frameTuple = scanCursor.getTuple();
String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(rec);
}
}
@@ -779,7 +779,7 @@
protected void diskOrderScan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
try {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Disk-Order Scan:");
}
ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) indexAccessor;
@@ -791,7 +791,7 @@
diskOrderCursor.next();
ITupleReference frameTuple = diskOrderCursor.getTuple();
String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(rec);
}
}
@@ -801,13 +801,13 @@
} catch (UnsupportedOperationException e) {
// Ignore exception because some indexes, e.g. the LSMBTree, don't
// support disk-order scan.
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ignoring disk-order scan since it's not supported.");
}
} catch (ClassCastException e) {
// Ignore exception because IIndexAccessor sometimes isn't
// an ITreeIndexAccessor, e.g., for the LSMBTree.
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ignoring disk-order scan since it's not supported.");
}
}
@@ -816,7 +816,7 @@
protected void rangeSearch(IBinaryComparatorFactory[] cmpFactories, IIndexAccessor indexAccessor,
ISerializerDeserializer[] fieldSerdes, ITupleReference lowKey, ITupleReference highKey,
ITupleReference minFilterTuple, ITupleReference maxFilterTuple) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
String lowKeyString = TupleUtils.printTuple(lowKey, fieldSerdes);
String highKeyString = TupleUtils.printTuple(highKey, fieldSerdes);
LOGGER.info("Range-Search in: [ " + lowKeyString + ", " + highKeyString + "]");
@@ -837,7 +837,7 @@
rangeCursor.next();
ITupleReference frameTuple = rangeCursor.getTuple();
String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(rec);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
index d73439d..90c64cb 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
@@ -20,8 +20,6 @@
package org.apache.hyracks.storage.am.btree;
import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -35,12 +33,14 @@
import org.apache.hyracks.storage.am.common.TestWorkloadConf;
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
import org.apache.hyracks.storage.common.IIndex;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.Test;
@SuppressWarnings("rawtypes")
public abstract class OrderedIndexMultiThreadTest {
- protected final Logger LOGGER = Logger.getLogger(OrderedIndexMultiThreadTest.class.getName());
+ protected final Logger LOGGER = LogManager.getLogger();
// Machine-specific number of threads to use for testing.
protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
@@ -67,7 +67,7 @@
String dataMsg) throws InterruptedException, HyracksDataException {
setUp();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
String indexTypeName = getIndexTypeName();
LOGGER.info(indexTypeName + " MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
+ "; Workload: " + conf.toString() + ".");
@@ -94,7 +94,7 @@
index.validate();
driver.deinit();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("BTree MultiThread Test Time: " + times[0] + "ms");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java
index 84a5df6..3dac0db 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java
@@ -20,9 +20,9 @@
package org.apache.hyracks.storage.am.btree;
import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.Test;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -35,7 +35,7 @@
@SuppressWarnings("rawtypes")
public abstract class OrderedIndexTestDriver {
- protected final Logger LOGGER = Logger.getLogger(OrderedIndexTestDriver.class.getName());
+ protected final Logger LOGGER = LogManager.getLogger();
protected static final int numTuplesToInsert = AccessMethodTestsConfig.BTREE_NUM_TUPLES_TO_INSERT;
@@ -58,7 +58,7 @@
@Test
public void oneIntKeyAndValue() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("BTree " + getTestOpName() + " Test With One Int Key And Value.");
}
@@ -75,7 +75,7 @@
@Test
public void twoIntKeys() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys.");
}
@@ -97,7 +97,7 @@
@Test
public void twoIntKeysAndValues() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys And Values.");
}
@@ -120,7 +120,7 @@
@Test
public void oneStringKeyAndValue() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("BTree " + getTestOpName() + " Test With One String Key And Value.");
}
@@ -138,7 +138,7 @@
@Test
public void twoStringKeys() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys.");
}
@@ -160,7 +160,7 @@
@Test
public void twoStringKeysAndValues() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys And Values.");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
index 1c408fc..665178c 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
@@ -28,12 +28,9 @@
import java.util.Random;
import java.util.SortedSet;
import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.lang3.tuple.MutablePair;
import org.apache.commons.lang3.tuple.Pair;
-import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -52,10 +49,12 @@
import org.apache.hyracks.storage.common.IIndexCursor;
import org.apache.hyracks.storage.common.ISearchPredicate;
import org.apache.hyracks.storage.common.MultiComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
@SuppressWarnings("rawtypes")
public class OrderedIndexTestUtils extends TreeIndexTestUtils {
- private static final Logger LOGGER = Logger.getLogger(OrderedIndexTestUtils.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static void compareActualAndExpected(ITupleReference actual, CheckTuple expected,
ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
@@ -95,7 +94,7 @@
@SuppressWarnings("unchecked")
public void checkRangeSearch(IIndexTestContext ctx, ITupleReference lowKey, ITupleReference highKey,
boolean lowKeyInclusive, boolean highKeyInclusive) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Testing Range Search.");
}
MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), lowKey);
@@ -143,7 +142,7 @@
}
public void checkPointSearches(IIndexTestContext ictx) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Testing Point Searches On All Expected Keys.");
}
OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
@@ -254,7 +253,7 @@
int c = 1;
for (CheckTuple checkTuple : checkTuples) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if (c % (numTuples / 10) == 0) {
LOGGER.info("Inserting Tuple " + c + "/" + numTuples);
}
@@ -278,7 +277,7 @@
String[] fieldValues = new String[fieldCount];
MutablePair<ITupleReference, ITupleReference> minMax = null;
for (int i = 0; i < numTuples; i++) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
}
@@ -318,7 +317,7 @@
int numKeyFields = ctx.getKeyFieldCount();
String[] fieldValues = new String[fieldCount];
for (int i = 0; i < numTuples; i++) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
}
@@ -387,7 +386,7 @@
// Set values.
setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
}
@@ -417,7 +416,7 @@
checkTuples[idx++] = checkTuple;
}
for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
LOGGER.info("Updating Tuple " + (i + 1) + "/" + numTuples);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
index 0a7f4db..9304adf 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
@@ -28,8 +28,6 @@
import java.util.Collection;
import java.util.Iterator;
import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.lang3.tuple.MutablePair;
import org.apache.commons.lang3.tuple.Pair;
@@ -44,10 +42,13 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
import org.apache.hyracks.storage.common.IIndexBulkLoader;
import org.apache.hyracks.storage.common.ISearchPredicate;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
@SuppressWarnings("rawtypes")
public abstract class TreeIndexTestUtils {
- private static final Logger LOGGER = Logger.getLogger(TreeIndexTestUtils.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
protected abstract CheckTuple createCheckTuple(int numFields, int numKeyFields);
@@ -116,7 +117,7 @@
@SuppressWarnings("unchecked")
public void checkScan(IIndexTestContext ctx) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Testing Scan.");
}
ITreeIndexCursor scanCursor = (ITreeIndexCursor) ctx.getIndexAccessor().createSearchCursor(false);
@@ -128,7 +129,7 @@
public void checkDiskOrderScan(IIndexTestContext ctx) throws Exception {
try {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Testing Disk-Order Scan.");
}
ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) ctx.getIndexAccessor();
@@ -158,19 +159,19 @@
try {
diskOrderCursor.close();
} catch (Exception ex) {
- LOGGER.log(Level.WARNING, "Error during scan cursor close", ex);
+ LOGGER.log(Level.WARN, "Error during scan cursor close", ex);
}
}
} catch (UnsupportedOperationException e) {
// Ignore exception because some indexes, e.g. the LSMTrees, don't
// support disk-order scan.
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ignoring disk-order scan since it's not supported.");
}
} catch (ClassCastException e) {
// Ignore exception because IIndexAccessor sometimes isn't
// an ITreeIndexAccessor, e.g., for the LSMBTree.
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ignoring disk-order scan since it's not supported.");
}
}
@@ -199,7 +200,7 @@
// Set values.
setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), filtered, fieldValues);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
}
@@ -256,7 +257,7 @@
// Set values.
setIntPayloadFields(fieldValues, numKeyFields, fieldCount);
TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
}
@@ -319,7 +320,7 @@
IIndexBulkLoader bulkLoader = ctx.getIndex().createBulkLoader(0.7f, false, numTuples, false);
int c = 1;
for (CheckTuple checkTuple : checkTuples) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
//if (c % (numTuples / 10) == 0) {
LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples);
//}
@@ -347,7 +348,7 @@
}
for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
LOGGER.info("Deleting Tuple " + (i + 1) + "/" + numTuples);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
index d6358e3..4a31cd6 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
@@ -20,8 +20,6 @@
package org.apache.hyracks.storage.am.rtree;
import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -53,6 +51,8 @@
import org.apache.hyracks.storage.common.IIndexAccessor;
import org.apache.hyracks.storage.common.IIndexBulkLoader;
import org.apache.hyracks.storage.common.MultiComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.Test;
@SuppressWarnings("rawtypes")
@@ -64,7 +64,7 @@
RTREE
};
- protected static final Logger LOGGER = Logger.getLogger(AbstractRTreeExamplesTest.class.getName());
+ protected static final Logger LOGGER = LogManager.getLogger();
protected final Random rnd = new Random(50);
protected RTreeType rTreeType;
@@ -82,7 +82,7 @@
*/
@Test
public void twoDimensionsExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Fixed-Length Key,Value Example.");
}
@@ -145,7 +145,7 @@
treeIndex.activate();
long start = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Inserting into tree...");
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -172,7 +172,7 @@
}
}
long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
}
@@ -198,7 +198,7 @@
*/
@Test
public void rTreePageSplitTestExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("RTree page split test.");
}
@@ -339,7 +339,7 @@
*/
@Test
public void rStarTreePageSplitTestExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("R*Tree page split test.");
}
@@ -482,7 +482,7 @@
*/
@Test
public void threeDimensionsExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Fixed-Length Key,Value Example.");
}
@@ -549,7 +549,7 @@
treeIndex.activate();
long start = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Inserting into tree...");
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -577,7 +577,7 @@
}
}
long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
}
@@ -603,7 +603,7 @@
*/
@Test
public void deleteExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Deletion Example");
}
@@ -663,7 +663,7 @@
int runs = 3;
for (int run = 0; run < runs; run++) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Deletion example run: " + (run + 1) + "/" + runs);
LOGGER.info("Inserting into tree...");
}
@@ -702,7 +702,7 @@
insDoneCmp[i] = insDone;
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Deleting from tree...");
}
int delDone = 0;
@@ -717,7 +717,7 @@
}
}
if (insDoneCmp[i] != delDone) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION EXAMPLE.");
LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
}
@@ -725,7 +725,7 @@
}
}
if (insDone != delDone) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
}
break;
@@ -740,7 +740,7 @@
*/
@Test
public void bulkLoadExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Bulk load example");
}
// Declare fields.
@@ -801,7 +801,7 @@
// Load records.
int numInserts = 10000;
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Bulk loading " + numInserts + " tuples");
}
long start = System.currentTimeMillis();
@@ -824,7 +824,7 @@
bulkLoader.end();
long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(numInserts + " tuples loaded in " + (end - start) + "ms");
}
@@ -842,7 +842,7 @@
}
protected void scan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Scan:");
}
ITreeIndexCursor scanCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
@@ -853,7 +853,7 @@
scanCursor.next();
ITupleReference frameTuple = scanCursor.getTuple();
String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(rec);
}
}
@@ -864,7 +864,7 @@
protected void diskOrderScan(IIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
try {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Disk-Order Scan:");
}
ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) indexAccessor;
@@ -876,7 +876,7 @@
diskOrderCursor.next();
ITupleReference frameTuple = diskOrderCursor.getTuple();
String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(rec);
}
}
@@ -886,13 +886,13 @@
} catch (UnsupportedOperationException e) {
// Ignore exception because some indexes, e.g. the LSMRTree, don't
// support disk-order scan.
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ignoring disk-order scan since it's not supported.");
}
} catch (ClassCastException e) {
// Ignore exception because IIndexAccessor sometimes isn't
// an ITreeIndexAccessor, e.g., for the LSMRTree.
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ignoring disk-order scan since it's not supported.");
}
}
@@ -901,7 +901,7 @@
protected void rangeSearch(IBinaryComparatorFactory[] cmpFactories, IIndexAccessor indexAccessor,
ISerializerDeserializer[] fieldSerdes, ITupleReference key, ITupleReference minFilterTuple,
ITupleReference maxFilterTuple) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
String kString = TupleUtils.printTuple(key, fieldSerdes);
LOGGER.info("Range-Search using key: " + kString);
}
@@ -921,7 +921,7 @@
rangeCursor.next();
ITupleReference frameTuple = rangeCursor.getTuple();
String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(rec);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
index 53245ac..cf0e1e4 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
@@ -20,8 +20,6 @@
package org.apache.hyracks.storage.am.rtree;
import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -41,6 +39,8 @@
import org.apache.hyracks.storage.am.rtree.AbstractRTreeExamplesTest.RTreeType;
import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
import org.apache.hyracks.storage.am.rtree.util.RTreeUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.Test;
@SuppressWarnings("rawtypes")
@@ -54,7 +54,7 @@
this.rTreeType = rTreeType;
}
- protected final Logger LOGGER = Logger.getLogger(AbstractRTreeMultiThreadTest.class.getName());
+ protected final Logger LOGGER = LogManager.getLogger();
// Machine-specific number of threads to use for testing.
protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
@@ -84,7 +84,7 @@
int numThreads, TestWorkloadConf conf, String dataMsg) throws HyracksDataException, InterruptedException {
setUp();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
String indexTypeName = getIndexTypeName();
LOGGER.info(indexTypeName + " MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
+ "; Workload: " + conf.toString() + ".");
@@ -120,7 +120,7 @@
long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
driver.deinit();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("RTree MultiThread Test Time: " + times[0] + "ms");
}
@@ -193,7 +193,7 @@
@Test
public void rstartreeTwoDimensionsInt() throws InterruptedException, HyracksDataException {
if (!testRstarPolicy) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ignoring RTree Multithread Test With Two Dimensions With Integer Keys.");
}
return;
@@ -220,7 +220,7 @@
@Test
public void rstartreeTwoDimensionsDouble() throws Exception {
if (!testRstarPolicy) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ignoring RTree Multithread Test With Two Dimensions With Double Keys.");
}
return;
@@ -248,7 +248,7 @@
@Test
public void rstartreeFourDimensionsDouble() throws InterruptedException, HyracksDataException {
if (!testRstarPolicy) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ignoring RTree Multithread Test With Four Dimensions With Double Keys.");
}
return;
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
index 17e4c09..1f71889 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
@@ -20,10 +20,6 @@
package org.apache.hyracks.storage.am.rtree;
import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.junit.Test;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.data.std.primitive.DoublePointable;
@@ -36,6 +32,9 @@
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
import org.apache.hyracks.storage.am.rtree.util.RTreeUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.Test;
@SuppressWarnings("rawtypes")
public abstract class AbstractRTreeTestDriver {
@@ -45,7 +44,7 @@
this.testRstarPolicy = testRstarPolicy;
}
- protected final Logger LOGGER = Logger.getLogger(AbstractRTreeTestDriver.class.getName());
+ protected final Logger LOGGER = LogManager.getLogger();
protected static final int numTuplesToInsert = AccessMethodTestsConfig.RTREE_NUM_TUPLES_TO_INSERT;
@@ -63,7 +62,7 @@
@Test
public void rtreeTwoDimensionsInt() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Integer Keys.");
}
@@ -84,7 +83,7 @@
@Test
public void rtreeTwoDimensionsDouble() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Double Keys.");
}
@@ -105,7 +104,7 @@
@Test
public void rtreeFourDimensionsDouble() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("RTree " + getTestOpName() + " Test With Four Dimensions With Double Keys.");
}
@@ -130,12 +129,12 @@
@Test
public void rstartreeTwoDimensionsInt() throws Exception {
if (!testRstarPolicy) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ignoring RTree " + getTestOpName() + " Test With Two Dimensions With Integer Keys.");
}
return;
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Integer Keys.");
}
@@ -157,12 +156,12 @@
@Test
public void rstartreeTwoDimensionsDouble() throws Exception {
if (!testRstarPolicy) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ignoring RTree " + getTestOpName() + " Test With Two Dimensions With Double Keys.");
}
return;
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Double Keys.");
}
@@ -184,12 +183,12 @@
@Test
public void rstartreeFourDimensionsDouble() throws Exception {
if (!testRstarPolicy) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Ignoring RTree " + getTestOpName() + " Test With Four Dimensions With Double Keys.");
}
return;
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("RTree " + getTestOpName() + " Test With Four Dimensions With Double Keys.");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
index 5a890a4..eb4ea56 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
@@ -24,8 +24,6 @@
import java.util.Collection;
import java.util.Iterator;
import java.util.Random;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.ErrorCode;
@@ -42,10 +40,12 @@
import org.apache.hyracks.storage.am.rtree.util.RTreeUtils;
import org.apache.hyracks.storage.common.ISearchPredicate;
import org.apache.hyracks.storage.common.MultiComparator;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
@SuppressWarnings("rawtypes")
public class RTreeTestUtils extends TreeIndexTestUtils {
- private static final Logger LOGGER = Logger.getLogger(RTreeTestUtils.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private int intPayloadValue = 0;
private double doublePayloadValue = 0.0;
@@ -65,7 +65,7 @@
}
public void checkRangeSearch(IIndexTestContext ictx, ITupleReference key) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Testing Range Search.");
}
AbstractRTreeTestContext ctx = (AbstractRTreeTestContext) ictx;
@@ -101,7 +101,7 @@
// Set values.
setDoublePayloadFields(fieldValues, numKeyFields, fieldCount);
TupleUtils.createDoubleTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
index 43b0957..1ff64d4 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
@@ -75,5 +75,9 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/BloomFilterTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/BloomFilterTest.java
index 24b1122..c6652c3 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/BloomFilterTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/BloomFilterTest.java
@@ -53,7 +53,7 @@
@Test
public void singleFieldTest() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("TESTING BLOOM FILTER");
}
@@ -111,7 +111,7 @@
@Test
public void multiFieldTest() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("TESTING BLOOM FILTER");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java
index 9d1b9be..b7b4639e 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/MurmurHashForITupleReferenceTest.java
@@ -21,7 +21,6 @@
import java.nio.ByteBuffer;
import java.util.Random;
-import java.util.logging.Level;
import org.junit.Assert;
import org.junit.Before;
@@ -50,7 +49,7 @@
@Test
public void murmurhashONEIntegerFieldTest() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("TESTING MURMUR HASH ONE INTEGER FIELD");
}
@@ -77,7 +76,7 @@
@Test
public void murmurhashTwoIntegerFieldsTest() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("TESTING MURMUR HASH TWO INTEGER FIELDS");
}
@@ -104,7 +103,7 @@
@Test
public void murmurhashOneStringFieldTest() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("TESTING MURMUR HASH ONE STRING FIELD");
}
@@ -132,7 +131,7 @@
@Test
public void murmurhashThreeStringFieldsTest() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("TESTING MURMUR HASH THREE STRING FIELDS");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java
index fdcd96b..b0644e3 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/src/test/java/org/apache/hyracks/storage/am/bloomfilter/util/AbstractBloomFilterTest.java
@@ -20,15 +20,16 @@
package org.apache.hyracks.storage.am.bloomfilter.util;
import java.util.Random;
-import java.util.logging.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.apache.hyracks.api.exceptions.HyracksDataException;
public abstract class AbstractBloomFilterTest {
- protected final Logger LOGGER = Logger.getLogger(BloomFilterTestHarness.class.getName());
+ protected final Logger LOGGER = LogManager.getLogger();
protected final BloomFilterTestHarness harness;
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
index 0ddceac..63b1a38 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
@@ -85,5 +85,9 @@
<artifactId>hyracks-data-std</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java
index 0a714cf..2943ee9 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java
@@ -26,7 +26,6 @@
import java.util.Collections;
import java.util.Random;
import java.util.TreeSet;
-import java.util.logging.Level;
import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -81,7 +80,7 @@
@Test
public void uniqueIndexTest() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
}
@@ -157,7 +156,7 @@
@Test
public void nonUniqueIndexTest() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
}
@@ -231,7 +230,7 @@
@Test
public void nonUniqueFieldPrefixIndexTest() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
}
@@ -400,14 +399,14 @@
u = ')';
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("RANGE: " + l + " " + lowKey + " , " + highKey + " " + u);
}
StringBuilder strBuilder = new StringBuilder();
for (Integer r : expectedResults) {
strBuilder.append(r + " ");
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(strBuilder.toString());
}
}
@@ -416,7 +415,7 @@
if (results.size() == expectedResults.size()) {
for (int k = 0; k < results.size(); k++) {
if (!results.get(k).equals(expectedResults.get(k))) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("DIFFERENT RESULTS AT: i=" + i + " j=" + j + " k=" + k);
LOGGER.info(results.get(k) + " " + expectedResults.get(k));
}
@@ -424,7 +423,7 @@
}
}
} else {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("UNEQUAL NUMBER OF RESULTS AT: i=" + i + " j=" + j);
LOGGER.info("RESULTS: " + results.size());
LOGGER.info("EXPECTED RESULTS: " + expectedResults.size());
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java
index f2fab5a..2243ee3 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java
@@ -20,7 +20,6 @@
import java.io.DataOutput;
import java.util.Random;
-import java.util.logging.Level;
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
@@ -110,7 +109,7 @@
long start = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("INSERTING INTO TREE");
}
@@ -146,7 +145,7 @@
tuple.reset(accessor, 0);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if (i % 10000 == 0) {
long end = System.currentTimeMillis();
LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
@@ -166,7 +165,7 @@
TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager,
harness.getFileReference(), btree.getRootPageId());
TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("\n" + stats.toString());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
index 062abae..bc297fa 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
@@ -19,7 +19,6 @@
package org.apache.hyracks.storage.am.btree;
import java.util.Random;
-import java.util.logging.Level;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -93,7 +92,7 @@
long start = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("INSERTING INTO TREE");
}
@@ -108,7 +107,7 @@
int f0 = rnd.nextInt() % 10000;
int f1 = 5;
TupleUtils.createIntegerTuple(tb, insertTuple, f0, f1);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if (i % 10000 == 0) {
long end = System.currentTimeMillis();
LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
@@ -126,12 +125,12 @@
}
long end = System.currentTimeMillis();
long duration = end - start;
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("DURATION: " + duration);
}
// Update scan.
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("UPDATE SCAN:");
}
// Set the cursor to X latch nodes.
@@ -152,7 +151,7 @@
}
// Ordered scan to verify the values.
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("ORDERED SCAN:");
}
// Set the cursor to X latch nodes.
@@ -163,7 +162,7 @@
scanCursor.next();
ITupleReference tuple = scanCursor.getTuple();
String rec = TupleUtils.printTuple(tuple, recDescSers);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(rec);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java
index bd5d5b8..48e8d51 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java
@@ -21,7 +21,6 @@
import java.io.DataOutput;
import java.util.Random;
-import java.util.logging.Level;
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
@@ -65,7 +64,7 @@
private ITupleReference createTuple(IHyracksTaskContext ctx, int f0, int f1, int f2, boolean print)
throws HyracksDataException {
if (print) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);
}
}
@@ -152,7 +151,7 @@
// insert records with random calls to compact and compress
for (int i = 0; i < numRecords; i++) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if ((i + 1) % 100 == 0) {
LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
}
@@ -192,7 +191,7 @@
// delete records with random calls to compact and compress
for (int i = 0; i < numRecords; i++) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if ((i + 1) % 100 == 0) {
LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/StorageFileAccessTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/StorageFileAccessTest.java
index 6dcb3a4..576c6bb 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/StorageFileAccessTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/StorageFileAccessTest.java
@@ -22,7 +22,6 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
-import java.util.logging.Level;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest;
@@ -83,7 +82,7 @@
private void pinRandomPage() {
int pageId = Math.abs(rnd.nextInt() % maxPages);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(workerId + " PINNING PAGE: " + pageId);
}
@@ -99,7 +98,7 @@
break;
case FTA_READONLY: {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(workerId + " S LATCHING: " + pageId);
}
page.acquireReadLatch();
@@ -108,7 +107,7 @@
break;
case FTA_WRITEONLY: {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(workerId + " X LATCHING: " + pageId);
}
page.acquireWriteLatch();
@@ -118,13 +117,13 @@
case FTA_MIXED: {
if (rnd.nextInt() % 2 == 0) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(workerId + " S LATCHING: " + pageId);
}
page.acquireReadLatch();
latch = LatchType.LATCH_S;
} else {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(workerId + " X LATCHING: " + pageId);
}
page.acquireWriteLatch();
@@ -149,18 +148,18 @@
if (plPage.latch != null) {
if (plPage.latch == LatchType.LATCH_S) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(workerId + " S UNLATCHING: " + plPage.pageId);
}
plPage.page.releaseReadLatch();
} else {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(workerId + " X UNLATCHING: " + plPage.pageId);
}
plPage.page.releaseWriteLatch(true);
}
}
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(workerId + " UNPINNING PAGE: " + plPage.pageId);
}
@@ -172,7 +171,7 @@
}
private void openFile() {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(workerId + " OPENING FILE: " + fileId);
}
try {
@@ -184,7 +183,7 @@
}
private void closeFile() {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(workerId + " CLOSING FILE: " + fileId);
}
try {
@@ -203,7 +202,7 @@
while (loopCount < maxLoopCount) {
loopCount++;
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(workerId + " LOOP: " + loopCount + "/" + maxLoopCount);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/util/AbstractBTreeTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/util/AbstractBTreeTest.java
index 20c7ff6..64c6038 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/util/AbstractBTreeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/util/AbstractBTreeTest.java
@@ -19,15 +19,15 @@
package org.apache.hyracks.storage.am.btree.util;
-import java.util.logging.Logger;
-
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.apache.hyracks.api.exceptions.HyracksDataException;
public abstract class AbstractBTreeTest {
- protected final Logger LOGGER = Logger.getLogger(BTreeTestHarness.class.getName());
+ protected final Logger LOGGER = LogManager.getLogger();
protected final BTreeTestHarness harness;
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
index 597ce59..af7e9e1 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
@@ -133,5 +133,9 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
index ecf1f85..0c7eed8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
@@ -73,7 +73,7 @@
*/
@Test
public void additionalFilteringingExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Testing LSMBTree component filters.");
}
@@ -106,7 +106,7 @@
treeIndex.activate();
long start = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Inserting into tree...");
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -119,7 +119,7 @@
int f0 = rnd.nextInt() % numInserts;
int f1 = i;
TupleUtils.createIntegerTuple(tb, tuple, f0, f1);
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if (i % 1000 == 0) {
LOGGER.info("Inserting " + i + " : " + f0 + " " + f1);
}
@@ -127,7 +127,7 @@
indexAccessor.insert(tuple);
}
long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeUpdateInPlaceScanDiskComponentsTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeUpdateInPlaceScanDiskComponentsTest.java
index 8a1444e..790cddd 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeUpdateInPlaceScanDiskComponentsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeUpdateInPlaceScanDiskComponentsTest.java
@@ -205,7 +205,7 @@
// keys the cube root of numTuples, etc.
int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
for (int i = 0; i < numTuples; i++) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
LOGGER.info("Generating Tuple " + (i + 1) + "/" + numTuples);
}
@@ -280,7 +280,7 @@
}
for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
LOGGER.info("Deleting Tuple " + (i + 1) + "/" + numTuples);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
index 80677db..803c5cb 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
@@ -26,7 +26,6 @@
import java.util.Date;
import java.util.List;
import java.util.Random;
-import java.util.logging.Logger;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -51,9 +50,11 @@
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
import org.apache.hyracks.test.support.TestUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class LSMBTreeTestHarness {
- protected static final Logger LOGGER = Logger.getLogger(LSMBTreeTestHarness.class.getName());
+ protected static final Logger LOGGER = LogManager.getLogger();
public static final BTreeLeafFrameType[] LEAF_FRAMES_TO_TEST =
new BTreeLeafFrameType[] { BTreeLeafFrameType.REGULAR_NSM };
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
index e6583fe..6d8929f 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
@@ -107,6 +107,10 @@
<artifactId>hyracks-storage-am-btree</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
index d9ca1154..1a0fd87 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
@@ -22,8 +22,6 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
@@ -35,11 +33,13 @@
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
import org.apache.hyracks.storage.common.IIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.Test;
public abstract class AbstractInvertedIndexSearchTest extends AbstractInvertedIndexTest {
- protected final Logger LOGGER = Logger.getLogger(AbstractInvertedIndexSearchTest.class.getName());
+ protected final Logger LOGGER = LogManager.getLogger();
protected int NUM_DOC_QUERIES = AccessMethodTestsConfig.LSM_INVINDEX_NUM_DOC_QUERIES;
protected int NUM_RANDOM_QUERIES = AccessMethodTestsConfig.LSM_INVINDEX_NUM_RANDOM_QUERIES;
@@ -70,7 +70,7 @@
invIndex.validate();
for (IInvertedIndexSearchModifier searchModifier : searchModifiers) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Running searches with: " + searchModifier.toString());
}
LSMInvertedIndexTestUtils.testIndexSearch(testCtx, tupleGen, harness.getRandom(), NUM_DOC_QUERIES,
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
index 5197812..757c9d8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
@@ -20,8 +20,6 @@
package org.apache.hyracks.storage.am.lsm.invertedindex.common;
import java.io.IOException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
@@ -33,11 +31,13 @@
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
import org.apache.hyracks.storage.common.IIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.After;
import org.junit.Before;
public abstract class AbstractInvertedIndexTest {
- protected final Logger LOGGER = Logger.getLogger(AbstractInvertedIndexTest.class.getName());
+ protected final Logger LOGGER = LogManager.getLogger();
protected final LSMInvertedIndexTestHarness harness = new LSMInvertedIndexTestHarness();
@@ -74,7 +74,7 @@
*/
protected void validateAndCheckIndex(LSMInvertedIndexTestContext testCtx) throws HyracksDataException {
IIndex invIndex = testCtx.getIndex();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Validating index: " + invIndex);
}
// Validate index and compare against expected index.
@@ -92,7 +92,7 @@
protected void runTinySearchWorkload(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen)
throws IOException {
for (IInvertedIndexSearchModifier searchModifier : TEST_SEARCH_MODIFIERS) {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Running test workload with: " + searchModifier.toString());
}
LSMInvertedIndexTestUtils.testIndexSearch(testCtx, tupleGen, harness.getRandom(),
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
index e0427e8..3b2641d 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
@@ -21,8 +21,6 @@
import java.io.IOException;
import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
@@ -34,11 +32,13 @@
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.Test;
public class LSMInvertedIndexMultiThreadTest {
- protected final Logger LOGGER = Logger.getLogger(LSMInvertedIndexMultiThreadTest.class.getName());
+ protected final Logger LOGGER = LogManager.getLogger();
// Machine-specific number of threads to use for testing.
protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
@@ -60,7 +60,7 @@
protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numThreads,
TestWorkloadConf conf, String dataMsg) throws InterruptedException, HyracksDataException {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("LSMInvertedIndex MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
+ "; Workload: " + conf.toString() + ".");
}
@@ -75,7 +75,7 @@
testCtx.getIndex().validate();
driver.deinit();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("LSMInvertedIndex MultiThread Test Time: " + times[0] + "ms");
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
index 9bfe6e9..a7bf5a7 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
@@ -98,5 +98,9 @@
<artifactId>hyracks-data-std</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java
index aefa385..00b3d28 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java
@@ -48,7 +48,7 @@
*/
@Test
public void additionalFilteringingExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Testing LSMRTree or LSMRTreeWithAntiMatterTuples component filters.");
}
@@ -116,7 +116,7 @@
treeIndex.activate();
long start = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("Inserting into tree...");
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
@@ -143,7 +143,7 @@
}
}
long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
index fd910e1..8a5d0c5 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
@@ -26,7 +26,6 @@
import java.util.Date;
import java.util.List;
import java.util.Random;
-import java.util.logging.Logger;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -52,7 +51,6 @@
import org.apache.hyracks.test.support.TestUtils;
public class LSMRTreeTestHarness {
- protected static final Logger LOGGER = Logger.getLogger(LSMRTreeTestHarness.class.getName());
private static final long RANDOM_SEED = 50;
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
index c984adb..1b34fde 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
@@ -86,5 +86,9 @@
<artifactId>hyracks-data-std</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
index d2cc96b..15f69bc 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
@@ -21,7 +21,6 @@
import java.util.ArrayList;
import java.util.Random;
-import java.util.logging.Level;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -78,7 +77,7 @@
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void rangeSearchTest() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
+ if (LOGGER.isInfoEnabled()) {
LOGGER.info("TESTING RANGE SEARCH CURSOR FOR RTREE");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
index 1e49e8a..ed94bf3 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
@@ -19,14 +19,14 @@
package org.apache.hyracks.storage.am.rtree.utils;
-import java.util.logging.Logger;
-
import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.junit.After;
import org.junit.Before;
public abstract class AbstractRTreeTest {
- protected final Logger LOGGER = Logger.getLogger(RTreeTestHarness.class.getName());
+ protected final Logger LOGGER = LogManager.getLogger();
protected final RTreeTestHarness harness;
public AbstractRTreeTest() {
diff --git a/hyracks-fullstack/hyracks/hyracks-util/pom.xml b/hyracks-fullstack/hyracks/hyracks-util/pom.xml
index 419166b..52ccf2f 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-util/pom.xml
@@ -71,6 +71,10 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/DiskUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/DiskUtil.java
index 9a65d72..a1f9346 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/DiskUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/DiskUtil.java
@@ -24,14 +24,15 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import org.apache.commons.lang3.SystemUtils;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class DiskUtil {
- private static final Logger LOGGER = Logger.getLogger(DiskUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private DiskUtil() {
throw new AssertionError("Util class should not be initialized.");
@@ -119,7 +120,7 @@
LOGGER.info(line);
}
} catch (IOException e) {
- LOGGER.log(Level.WARNING, e.getMessage(), e);
+ LOGGER.log(Level.WARN, e.getMessage(), e);
}
}).start();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/ExitUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/ExitUtil.java
index b039227..c54c9dc 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/ExitUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/ExitUtil.java
@@ -18,12 +18,13 @@
*/
package org.apache.hyracks.util;
-import java.util.logging.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
@SuppressWarnings("squid:S1147")
public class ExitUtil {
- private static final Logger LOGGER = Logger.getLogger(ExitUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final ExitThread exitThread = new ExitThread();
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/JSONUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/JSONUtil.java
index dcdf140..158ab66 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/JSONUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/JSONUtil.java
@@ -23,7 +23,9 @@
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
@@ -34,7 +36,7 @@
public class JSONUtil {
- private static final Logger LOGGER = Logger.getLogger(JSONUtil.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private static final String INDENT = "\t";
@@ -63,8 +65,8 @@
try {
return appendObj(new StringBuilder(), om.readTree(str), initialIndent).toString();
} catch (IOException e) {
- LOGGER.finest(String.valueOf(e));
- LOGGER.finest("Could not indent JSON string, returning the input string: " + str);
+ LOGGER.trace(String.valueOf(e));
+ LOGGER.trace("Could not indent JSON string, returning the input string: " + str);
return str;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/PidHelper.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/PidHelper.java
index 410097e..5a8edbd 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/PidHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/PidHelper.java
@@ -22,12 +22,14 @@
import java.lang.management.RuntimeMXBean;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
public class PidHelper {
- private static final Logger LOGGER = Logger.getLogger(PidHelper.class.getName());
+ private static final Logger LOGGER = LogManager.getLogger();
private PidHelper() {
}
diff --git a/hyracks-fullstack/pom.xml b/hyracks-fullstack/pom.xml
index 28b60d3..44d4f4f 100644
--- a/hyracks-fullstack/pom.xml
+++ b/hyracks-fullstack/pom.xml
@@ -184,6 +184,16 @@
<artifactId>args4j</artifactId>
<version>2.33</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-api</artifactId>
+ <version>2.10.0</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-core</artifactId>
+ <version>2.10.0</version>
+ </dependency>
</dependencies>
</dependencyManagement>