falcon-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From shweth...@apache.org
Subject [2/4] FALCON-133 Upgrade to slf4j 1.7.5 and use SLF4J logger. Contributed by Jean-Baptiste Onofré
Date Wed, 21 May 2014 07:21:02 GMT
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/metrics/src/main/java/org/apache/falcon/aspect/AbstractFalconAspect.java
----------------------------------------------------------------------
diff --git a/metrics/src/main/java/org/apache/falcon/aspect/AbstractFalconAspect.java b/metrics/src/main/java/org/apache/falcon/aspect/AbstractFalconAspect.java
index 314e27b..29c77ce 100644
--- a/metrics/src/main/java/org/apache/falcon/aspect/AbstractFalconAspect.java
+++ b/metrics/src/main/java/org/apache/falcon/aspect/AbstractFalconAspect.java
@@ -19,10 +19,11 @@
 package org.apache.falcon.aspect;
 
 import org.apache.falcon.util.ResourcesReflectionUtil;
-import org.apache.log4j.Logger;
 import org.aspectj.lang.ProceedingJoinPoint;
 import org.aspectj.lang.annotation.Around;
 import org.aspectj.lang.annotation.Aspect;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.HashMap;
 import java.util.Map;
@@ -34,7 +35,7 @@ import java.util.Map;
 @Aspect
 public abstract class AbstractFalconAspect {
 
-    private static final Logger LOG = Logger.getLogger(AbstractFalconAspect.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractFalconAspect.class);
 
     @Around("@annotation(org.apache.falcon.monitors.Monitored)")
     public Object logAround(ProceedingJoinPoint joinPoint) throws Throwable {
@@ -74,8 +75,7 @@ public abstract class AbstractFalconAspect {
         Map<String, String> dimensions = new HashMap<String, String>();
 
         if (ResourcesReflectionUtil.getResourceDimensionsName(methodName) == null) {
-            LOG.warn("Class for method name: " + methodName
-                    + " is not added to ResourcesReflectionUtil");
+            LOG.warn("Class for method name: {} is not added to ResourcesReflectionUtil", methodName);
         } else {
             for (Map.Entry<Integer, String> param : ResourcesReflectionUtil
                     .getResourceDimensionsName(methodName).entrySet()) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/metrics/src/main/java/org/apache/falcon/plugin/LoggingPlugin.java
----------------------------------------------------------------------
diff --git a/metrics/src/main/java/org/apache/falcon/plugin/LoggingPlugin.java b/metrics/src/main/java/org/apache/falcon/plugin/LoggingPlugin.java
index 41a52bf..77d0d30 100644
--- a/metrics/src/main/java/org/apache/falcon/plugin/LoggingPlugin.java
+++ b/metrics/src/main/java/org/apache/falcon/plugin/LoggingPlugin.java
@@ -19,16 +19,17 @@
 package org.apache.falcon.plugin;
 
 import org.apache.falcon.aspect.ResourceMessage;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Plugin for logging metrics using log4j.
  */
 public class LoggingPlugin implements MonitoringPlugin {
-    private static final Logger METRIC = Logger.getLogger("METRIC");
+    private static final Logger METRIC = LoggerFactory.getLogger("METRIC");
 
     @Override
     public void monitor(ResourceMessage message) {
-        METRIC.info(message);
+        METRIC.info("{}", message);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie-el-extensions/pom.xml
----------------------------------------------------------------------
diff --git a/oozie-el-extensions/pom.xml b/oozie-el-extensions/pom.xml
index 0c8d135..8f7695e 100644
--- a/oozie-el-extensions/pom.xml
+++ b/oozie-el-extensions/pom.xml
@@ -62,8 +62,13 @@
         </dependency>
 
         <dependency>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <scope>test</scope>
         </dependency>
 
         <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/logging/DefaultTaskLogRetriever.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/logging/DefaultTaskLogRetriever.java b/oozie/src/main/java/org/apache/falcon/logging/DefaultTaskLogRetriever.java
index ae4dd12..a685faf 100644
--- a/oozie/src/main/java/org/apache/falcon/logging/DefaultTaskLogRetriever.java
+++ b/oozie/src/main/java/org/apache/falcon/logging/DefaultTaskLogRetriever.java
@@ -24,7 +24,8 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 
@@ -33,7 +34,7 @@ import java.io.IOException;
  */
 public class DefaultTaskLogRetriever extends Configured implements TaskLogURLRetriever {
 
-    private static final Logger LOG = Logger.getLogger(DefaultTaskLogRetriever.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DefaultTaskLogRetriever.class);
 
     @Override
     public String retrieveTaskLogURL(String jobId) throws IOException {
@@ -42,7 +43,7 @@ public class DefaultTaskLogRetriever extends Configured implements TaskLogURLRet
 
         RunningJob job = jobClient.getJob(JobID.forName(jobId));
         if (job == null) {
-            LOG.warn("No running job for job id: " + jobId);
+            LOG.warn("No running job for job id: {}", jobId);
             return getFromHistory(jobId);
         }
         TaskCompletionEvent[] tasks = job.getTaskCompletionEvents(0);
@@ -51,7 +52,7 @@ public class DefaultTaskLogRetriever extends Configured implements TaskLogURLRet
             return tasks[1].getTaskTrackerHttp() + "/tasklog?attemptid="
                     + tasks[1].getTaskAttemptId() + "&all=true";
         } else {
-            LOG.warn("No running task for job: " + jobId);
+            LOG.warn("No running task for job: {}", jobId);
             return getFromHistory(jobId);
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/logging/LogMover.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/logging/LogMover.java b/oozie/src/main/java/org/apache/falcon/logging/LogMover.java
index 117aa58..3922b38 100644
--- a/oozie/src/main/java/org/apache/falcon/logging/LogMover.java
+++ b/oozie/src/main/java/org/apache/falcon/logging/LogMover.java
@@ -31,11 +31,12 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.OozieClient;
 import org.apache.oozie.client.OozieClientException;
 import org.apache.oozie.client.WorkflowAction;
 import org.apache.oozie.client.WorkflowJob;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
@@ -53,7 +54,7 @@ import java.util.Set;
  */
 public class LogMover extends Configured implements Tool {
 
-    private static final Logger LOG = Logger.getLogger(LogMover.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LogMover.class);
     public static final Set<String> FALCON_ACTIONS =
         new HashSet<String>(Arrays.asList(new String[]{"eviction", "replication", }));
 
@@ -84,7 +85,7 @@ public class LogMover extends Configured implements Tool {
             try {
                 jobInfo = client.getJobInfo(args.subflowId);
             } catch (OozieClientException e) {
-                LOG.error("Error getting jobinfo for: " + args.subflowId, e);
+                LOG.error("Error getting jobinfo for: {}", args.subflowId, e);
                 return 0;
             }
 
@@ -115,14 +116,13 @@ public class LogMover extends Configured implements Tool {
                             || action.getType().equals("java")) {
                         copyTTlogs(fs, path, action);
                     } else {
-                        LOG.info("Ignoring hadoop TT log for non-pig and non-java action:"
-                                + action.getName());
+                        LOG.info("Ignoring hadoop TT log for non-pig and non-java action: {}", action.getName());
                     }
                 }
             }
 
         } catch (Exception e) {
-            LOG.error("Exception in log mover:", e);
+            LOG.error("Exception in log mover", e);
         }
         return 0;
     }
@@ -137,20 +137,19 @@ public class LogMover extends Configured implements Tool {
         InputStream in = new ByteArrayInputStream(client.getJobLog(id).getBytes());
         OutputStream out = fs.create(new Path(path, "oozie.log"));
         IOUtils.copyBytes(in, out, 4096, true);
-        LOG.info("Copied oozie log to " + path);
+        LOG.info("Copied oozie log to {}", path);
     }
 
     private void copyTTlogs(FileSystem fs, Path path,
                             WorkflowAction action) throws Exception {
         String ttLogURL = getTTlogURL(action.getExternalId());
         if (ttLogURL != null) {
-            LOG.info("Fetching log for action: " + action.getExternalId()
-                    + " from url: " + ttLogURL);
+            LOG.info("Fetching log for action: {} from url: {}", action.getExternalId(), ttLogURL);
             InputStream in = getURLinputStream(new URL(ttLogURL));
             OutputStream out = fs.create(new Path(path, action.getName() + "_"
                     + getMappedStatus(action.getStatus()) + ".log"));
             IOUtils.copyBytes(in, out, 4096, true);
-            LOG.info("Copied log to " + path);
+            LOG.info("Copied log to {}", path);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java b/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java
index 0c338da..4ed8f52 100644
--- a/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java
+++ b/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java
@@ -32,9 +32,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.OozieClientException;
 import org.mortbay.log.Log;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 
@@ -42,7 +43,7 @@ import java.io.IOException;
  * Get oozie action execution logs corresponding to a run as saved by the log mover.
  */
 public final class LogProvider {
-    private static final Logger LOG = Logger.getLogger(LogProvider.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LogProvider.class);
 
     public Instance populateLogUrls(Entity entity, Instance instance,
                                     String runId) throws FalconException {
@@ -86,7 +87,7 @@ public final class LogProvider {
                 // order of runs
                 return runs[runs.length - 1].getPath().getName();
             } else {
-                LOG.warn("No run dirs are available in logs dir:" + jobPath);
+                LOG.warn("No run dirs are available in logs dir: {}", jobPath);
                 return "-";
             }
         } else {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java b/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java
index 11dc1e4..c61e089 100644
--- a/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java
+++ b/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java
@@ -32,7 +32,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
@@ -42,7 +43,7 @@ import java.util.Properties;
  * Host shared libraries in oozie shared lib dir upon creation or modification of cluster.
  */
 public class SharedLibraryHostingService implements ConfigurationChangeListener {
-    private static final Logger LOG = Logger.getLogger(SharedLibraryHostingService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(SharedLibraryHostingService.class);
 
     private static final String[] LIBS = StartupProperties.get().getProperty("shared.libs").split(",");
 
@@ -84,7 +85,7 @@ public class SharedLibraryHostingService implements ConfigurationChangeListener
             pushLibsToHDFS(properties.getProperty("libext.process.paths"),
                     new Path(libext, EntityType.PROCESS.name()) , cluster, null);
         } catch (IOException e) {
-            LOG.error("Failed to copy shared libs to cluster " + cluster.getName(), e);
+            LOG.error("Failed to copy shared libs to cluster {}", cluster.getName(), e);
         }
     }
 
@@ -94,7 +95,7 @@ public class SharedLibraryHostingService implements ConfigurationChangeListener
             return;
         }
 
-        LOG.debug("Copying libs from " + src);
+        LOG.debug("Copying libs from {}", src);
         FileSystem fs;
         try {
             fs = getFileSystem(cluster);
@@ -132,7 +133,7 @@ public class SharedLibraryHostingService implements ConfigurationChangeListener
                     }
                 }
                 fs.copyFromLocalFile(false, true, new Path(file.getAbsolutePath()), targetFile);
-                LOG.info("Copied " + file.getAbsolutePath() + " to " + targetFile.toString() + " in " + fs.getUri());
+                LOG.info("Copied {} to {} in {}", file.getAbsolutePath(), targetFile.toString(), fs.getUri());
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java b/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java
index fc4eabd..d3befa2 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java
@@ -28,7 +28,8 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -37,7 +38,7 @@ import java.util.List;
  * Utility called by oozie workflow engine post workflow execution in parent workflow.
  */
 public class FalconPostProcessing extends Configured implements Tool {
-    private static final Logger LOG = Logger.getLogger(FalconPostProcessing.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FalconPostProcessing.class);
 
     /**
      * Args that the utility understands.
@@ -101,19 +102,19 @@ public class FalconPostProcessing extends Configured implements Tool {
 
         CommandLine cmd = getCommand(args);
 
-        LOG.info("Sending user message " + cmd);
+        LOG.info("Sending user message {}", cmd);
         invokeUserMessageProducer(cmd);
 
         if ("SUCCEEDED".equals(Arg.STATUS.getOptionValue(cmd))) {
-            LOG.info("Recording lineage for " + cmd);
+            LOG.info("Recording lineage for {}", cmd);
             recordLineageMetadata(cmd);
         }
 
         //LogMover doesn't throw exception, a failed log mover will not fail the user workflow
-        LOG.info("Moving logs " + cmd);
+        LOG.info("Moving logs {}", cmd);
         invokeLogProducer(cmd);
 
-        LOG.info("Sending falcon message " + cmd);
+        LOG.info("Sending falcon message {}", cmd);
         invokeFalconMessageProducer(cmd);
 
         return 0;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/workflow/OozieWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/OozieWorkflowBuilder.java b/oozie/src/main/java/org/apache/falcon/workflow/OozieWorkflowBuilder.java
index 96b5a64..7616df1 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/OozieWorkflowBuilder.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/OozieWorkflowBuilder.java
@@ -59,8 +59,9 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.OozieClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBElement;
@@ -88,7 +89,7 @@ import java.util.Set;
  */
 public abstract class OozieWorkflowBuilder<T extends Entity> extends WorkflowBuilder<T> {
 
-    private static final Logger LOG = Logger.getLogger(OozieWorkflowBuilder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(OozieWorkflowBuilder.class);
     protected static final ConfigurationStore CONFIG_STORE = ConfigurationStore.get();
 
     protected static final String NOMINAL_TIME_EL = "${coord:formatTime(coord:nominalTime(), 'yyyy-MM-dd-HH-mm')}";
@@ -272,7 +273,7 @@ public abstract class OozieWorkflowBuilder<T extends Entity> extends WorkflowBui
                 props.put("shouldRecord", "true");
             }
         } catch (FalconException e) {
-            LOG.error("Unable to get Late Process for entity:" + entity, e);
+            LOG.error("Unable to get Late Process for entity: {}", entity, e);
             throw new FalconRuntimException(e);
         }
         props.put("entityName", entity.getName());
@@ -316,11 +317,11 @@ public abstract class OozieWorkflowBuilder<T extends Entity> extends WorkflowBui
             if (LOG.isDebugEnabled()) {
                 StringWriter writer = new StringWriter();
                 marshaller.marshal(jaxbElement, writer);
-                LOG.debug("Writing definition to " + outPath + " on cluster " + cluster.getName());
-                LOG.debug(writer.getBuffer());
+                LOG.debug("Writing definition to {} on cluster {}", outPath, cluster.getName());
+                LOG.debug(writer.getBuffer().toString());
             }
 
-            LOG.info("Marshalled " + jaxbElement.getDeclaredType() + " to " + outPath);
+            LOG.info("Marshalled {} to {}", jaxbElement.getDeclaredType(), outPath);
         } catch (Exception e) {
             throw new FalconException("Unable to marshall app object", e);
         }
@@ -591,7 +592,7 @@ public abstract class OozieWorkflowBuilder<T extends Entity> extends WorkflowBui
             propagateHiveCredentials(cluster, properties);
         }
 
-        LOG.info("Cluster: " + cluster.getName() + ", PROPS: " + properties);
+        LOG.info("Cluster: {}, PROPS: {}", cluster.getName(), properties);
         return properties;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java
index d6dd2af..d598097 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java
@@ -23,8 +23,9 @@ import org.apache.falcon.entity.ClusterHelper;
 import org.apache.falcon.entity.store.ConfigurationStore;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.ProxyOozieClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.concurrent.ConcurrentHashMap;
 
@@ -33,7 +34,7 @@ import java.util.concurrent.ConcurrentHashMap;
  */
 public final class OozieClientFactory {
 
-    private static final Logger LOG = Logger.getLogger(OozieClientFactory.class);
+    private static final Logger LOG = LoggerFactory.getLogger(OozieClientFactory.class);
     private static final String LOCAL_OOZIE = "local";
 
     private static final ConcurrentHashMap<String, ProxyOozieClient> CACHE =
@@ -49,7 +50,7 @@ public final class OozieClientFactory {
         String oozieUrl = ClusterHelper.getOozieUrl(cluster);
         if (!CACHE.containsKey(oozieUrl)) {
             ProxyOozieClient ref = getClientRef(oozieUrl);
-            LOG.info("Caching Oozie client object for " + oozieUrl);
+            LOG.info("Caching Oozie client object for {}", oozieUrl);
             CACHE.putIfAbsent(oozieUrl, ref);
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java
index a930bb7..bbed949 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java
@@ -28,14 +28,15 @@ import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Service that cleans up artifacts that falcon dropped on hdfs for oozie's use.
  */
 public class OozieHouseKeepingService implements WorkflowEngineActionListener {
 
-    private static final Logger LOG = Logger.getLogger(OozieHouseKeepingService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(OozieHouseKeepingService.class);
 
     @Override
     public void beforeSchedule(Entity entity, String cluster) throws FalconException {
@@ -54,7 +55,7 @@ public class OozieHouseKeepingService implements WorkflowEngineActionListener {
         try {
             Cluster cluster = EntityUtil.getEntity(EntityType.CLUSTER, clusterName);
             Path entityPath = EntityUtil.getBaseStagingPath(cluster, entity);
-            LOG.info("Deleting entity path " + entityPath + " on cluster " + clusterName);
+            LOG.info("Deleting entity path {} on cluster {}", entityPath, clusterName);
 
             Configuration conf = ClusterHelper.getConfiguration(cluster);
             FileSystem fs = HadoopClientFactory.get().createFileSystem(conf);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
index 9d4103b..57fca0f 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
@@ -40,11 +40,12 @@ import org.apache.falcon.workflow.OozieWorkflowBuilder;
 import org.apache.falcon.workflow.WorkflowBuilder;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.*;
 import org.apache.oozie.client.CoordinatorJob.Timeunit;
 import org.apache.oozie.client.Job.Status;
 import org.apache.oozie.client.rest.RestConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.*;
@@ -55,7 +56,7 @@ import java.util.Map.Entry;
  */
 public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
-    private static final Logger LOG = Logger.getLogger(OozieWorkflowEngine.class);
+    private static final Logger LOG = LoggerFactory.getLogger(OozieWorkflowEngine.class);
 
     public static final String ENGINE = "oozie";
     private static final BundleJob MISSING = new NullBundleJob();
@@ -110,7 +111,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
             if (bundleJob == MISSING) {
                 schedClusters.add(cluster);
             } else {
-                LOG.debug("The entity " + entity.getName() + " is already scheduled on cluster " + cluster);
+                LOG.debug("The entity {} is already scheduled on cluster {}", entity.getName(), cluster);
             }
         }
 
@@ -120,7 +121,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                 String[schedClusters.size()]));
             for (Map.Entry<String, Properties> entry : newFlows.entrySet()) {
                 String cluster = entry.getKey();
-                LOG.info("Scheduling " + entity.toShortString() + " on cluster " + cluster);
+                LOG.info("Scheduling {} on cluster {}", entity.toShortString(), cluster);
                 scheduleEntity(cluster, entry.getValue(), entity);
                 commitStagingPath(cluster, entry.getValue().getProperty(OozieClient.BUNDLE_APP_PATH));
             }
@@ -196,7 +197,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                     //Filtering bundles that correspond to deleted entities(endtime is set when an entity is deleted)
                     if (job.getEndTime() == null) {
                         filteredJobs.add(job);
-                        LOG.debug("Found bundle " + job.getId());
+                        LOG.debug("Found bundle {}", job.getId());
                     }
                 }
                 return filteredJobs;
@@ -278,7 +279,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
         List<BundleJob> jobs = findBundles(entity, cluster);
         if (jobs.isEmpty()) {
-            LOG.warn("No active job found for " + entity.getName());
+            LOG.warn("No active job found for {}", entity.getName());
             return "FAILED";
         }
 
@@ -320,16 +321,16 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
             //kill all coords
             for (CoordinatorJob coord : job.getCoordinators()) {
                 client.kill(coord.getId());
-                LOG.debug("Killed coord " + coord.getId() + " on cluster " + clusterName);
+                LOG.debug("Killed coord {} on cluster {}", coord.getId(), clusterName);
             }
 
             //set end time of bundle
             client.change(job.getId(), OozieClient.CHANGE_VALUE_ENDTIME + "=" + SchemaHelper.formatDateUTC(new Date()));
-            LOG.debug("Changed end time of bundle " + job.getId() + " on cluster " + clusterName);
+            LOG.debug("Changed end time of bundle {} on cluster {}", job.getId(), clusterName);
 
             //kill bundle
             client.kill(job.getId());
-            LOG.debug("Killed bundle " + job.getId() + " on cluster " + clusterName);
+            LOG.debug("Killed bundle {} on cluster {}", job.getId(), clusterName);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -494,7 +495,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                 try {
                     status = performAction(cluster, action, coordinatorAction, props);
                 } catch (FalconException e) {
-                    LOG.warn("Unable to perform action " + action + " on cluster ", e);
+                    LOG.warn("Unable to perform action {} on cluster", action, e);
                     status = WorkflowStatus.ERROR.name();
                     overallStatus = APIResult.Status.PARTIAL;
                 }
@@ -571,7 +572,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                     coordJob = client.getCoordJobInfo(coord.getId(), null, startActionNumber,
                         (lastMaterializedActionNumber - startActionNumber));
                 } catch (OozieClientException e) {
-                    LOG.debug("Unable to get details for coordinator " + coord.getId() + " " + e.getMessage());
+                    LOG.debug("Unable to get details for coordinator {}", coord.getId(), e);
                     throw new FalconException(e);
                 }
 
@@ -675,7 +676,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                 org.apache.oozie.client.CoordinatorAction.Status.RUNNING,
                 org.apache.oozie.client.CoordinatorAction.Status.WAITING,
                 org.apache.oozie.client.CoordinatorAction.Status.READY);
-            LOG.info("Rerun job " + coordinatorAction.getId() + " on cluster " + cluster);
+            LOG.info("Rerun job {} on cluster {}", coordinatorAction.getId(), cluster);
         } catch (Exception e) {
             LOG.error("Unable to rerun workflows", e);
             throw new FalconException(e);
@@ -773,7 +774,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                             actions.add(coordActionInfo);
                         }
                     } catch (OozieClientException e) {
-                        LOG.debug("Unable to get action for " + actionId + " " + e.getMessage());
+                        LOG.debug("Unable to get action for {}", actionId, e);
                     }
 
                     Calendar startCal = Calendar.getInstance(EntityUtil.getTimeZone(coord.getTimeZone()));
@@ -860,7 +861,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         boolean wfUpdated = UpdateHelper.isWorkflowUpdated(cluster, newEntity);
 
         if (!entityUpdated && !wfUpdated) {
-            LOG.debug("Nothing to update for cluster " + cluster);
+            LOG.debug("Nothing to update for cluster {}", cluster);
             return null;
         }
 
@@ -869,27 +870,28 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         if (stagingPath != null) {  //update if entity is scheduled
             BundleJob bundle = findBundleForStagingPath(cluster, oldEntity, stagingPath);
             bundle = getBundleInfo(cluster, bundle.getId());
-            LOG.info("Updating entity through Workflow Engine" + newEntity.toShortString());
+            LOG.info("Updating entity through Workflow Engine {}", newEntity.toShortString());
             Date newEndTime = EntityUtil.getEndTime(newEntity, cluster);
             if (newEndTime.before(now())) {
                 throw new FalconException("New end time for " + newEntity.getName() + " is past current time. Entity "
                     + "can't be updated. Use remove and add");
             }
 
-            LOG.debug("Updating for cluster : " + cluster + ", bundle: " + bundle.getId());
+            LOG.debug("Updating for cluster: {}, bundle: {}", cluster, bundle.getId());
 
             if (canUpdateBundle(oldEntity, newEntity, wfUpdated)) {
                 // only concurrency and endtime are changed. So, change coords
-                LOG.info("Change operation is adequate! : " + cluster + ", bundle: " + bundle.getId());
+                LOG.info("Change operation is adequate! : {}, bundle: {}", cluster, bundle.getId());
                 updateCoords(cluster, bundle, EntityUtil.getParallel(newEntity),
                     EntityUtil.getEndTime(newEntity, cluster));
                 return newEndTime;
             }
 
-            LOG.debug("Going to update ! : " + newEntity.toShortString() + "for cluster " + cluster + ", "
-                + "bundle: " + bundle.getId());
+            LOG.debug("Going to update! : {} for cluster {}, bundle: {}",
+                    newEntity.toShortString(), cluster, bundle.getId());
             effectiveTime = updateInternal(oldEntity, newEntity, cluster, bundle, false, effectiveTime);
-            LOG.info("Entity update complete : " + newEntity.toShortString() + cluster + ", bundle: " + bundle.getId());
+            LOG.info("Entity update complete: {} for cluster {}, bundle: {}",
+                    newEntity.toShortString(), cluster, bundle.getId());
         }
 
         //Update affected entities
@@ -899,7 +901,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                 continue;
             }
 
-            LOG.info("Dependent entities need to be updated " + affectedEntity.toShortString());
+            LOG.info("Dependent entities need to be updated {}", affectedEntity.toShortString());
             if (!UpdateHelper.shouldUpdate(oldEntity, newEntity, affectedEntity, cluster)) {
                 continue;
             }
@@ -909,17 +911,17 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                 continue;
             }
 
-            LOG.info("Triggering update for " + cluster + ", " + affectedProcBundle.getId());
+            LOG.info("Triggering update for {}, {}", cluster, affectedProcBundle.getId());
 
             Date depEndTime =
                 updateInternal(affectedEntity, affectedEntity, cluster, affectedProcBundle, false, effectiveTime);
             if (effectiveTime == null || effectiveTime.after(depEndTime)) {
                 effectiveTime = depEndTime;
             }
-            LOG.info("Entity update complete : " + affectedEntity.toShortString() + cluster + ", "+ "bundle: "
-                + affectedProcBundle.getId());
+            LOG.info("Entity update complete: {} for cluster {}, bundle: {}",
+                    affectedEntity.toShortString(), cluster, affectedProcBundle.getId());
         }
-        LOG.info("Entity update and all dependent entities updated: " + oldEntity.toShortString());
+        LOG.info("Entity update and all dependent entities updated: {}", oldEntity.toShortString());
         return effectiveTime;
     }
 
@@ -964,26 +966,26 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
         // change coords
         for (CoordinatorJob coord : bundle.getCoordinators()) {
-            LOG.debug("Updating endtime of coord " + coord.getId() + " to " + SchemaHelper.formatDateUTC(endTime)
-                + " on cluster " + cluster);
+            LOG.debug("Updating endtime of coord {} to {} on cluster {}",
+                    coord.getId(), SchemaHelper.formatDateUTC(endTime), cluster);
             Date lastActionTime = getCoordLastActionTime(coord);
             if (lastActionTime == null) { // nothing is materialized
-                LOG.info("Nothing is materialized for this coord: " + coord.getId());
+                LOG.info("Nothing is materialized for this coord: {}", coord.getId());
                 if (endTime.compareTo(coord.getStartTime()) <= 0) {
-                    LOG.info("Setting end time to START TIME " + SchemaHelper.formatDateUTC(coord.getStartTime()));
+                    LOG.info("Setting end time to START TIME {}", SchemaHelper.formatDateUTC(coord.getStartTime()));
                     change(cluster, coord.getId(), concurrency, coord.getStartTime(), null);
                 } else {
-                    LOG.info("Setting end time to START TIME " + SchemaHelper.formatDateUTC(endTime));
+                    LOG.info("Setting end time to START TIME {}", SchemaHelper.formatDateUTC(endTime));
                     change(cluster, coord.getId(), concurrency, endTime, null);
                 }
             } else {
-                LOG.info("Actions have materialized for this coord: " + coord.getId() + ", last action "
-                    + SchemaHelper.formatDateUTC(lastActionTime));
+                LOG.info("Actions have materialized for this coord: {}, last action {}",
+                        coord.getId(), SchemaHelper.formatDateUTC(lastActionTime));
                 if (!endTime.after(lastActionTime)) {
                     Date pauseTime = offsetTime(endTime, -1);
                     // set pause time which deletes future actions
-                    LOG.info("Setting pause time on coord : " + coord.getId() + " to " + SchemaHelper.formatDateUTC(
-                        pauseTime));
+                    LOG.info("Setting pause time on coord: {} to {}",
+                            coord.getId(), SchemaHelper.formatDateUTC(pauseTime));
                     change(cluster, coord.getId(), concurrency, null, SchemaHelper.formatDateUTC(pauseTime));
                 }
                 change(cluster, coord.getId(), concurrency, endTime, "");
@@ -1037,16 +1039,16 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
             //newBundleId and latestBundle will be null if effectiveTime = process end time
             if (newBundleId != null) {
                 latestBundle = getBundleInfo(cluster, newBundleId);
-                LOG.info("New bundle " + newBundleId + " scheduled successfully with start time "
-                    + SchemaHelper.formatDateUTC(effectiveTime));
+                LOG.info("New bundle {} scheduled successfully with start time {}",
+                        newBundleId, SchemaHelper.formatDateUTC(effectiveTime));
             }
         } else {
-            LOG.info("New bundle has already been created. Bundle Id: " + latestBundle.getId() + ", Start: "
-                + SchemaHelper.formatDateUTC(latestBundle.getStartTime()) + ", End: " + latestBundle.getEndTime());
+            LOG.info("New bundle has already been created. Bundle Id: {}, Start: {}, End: {}", latestBundle.getId(),
+                SchemaHelper.formatDateUTC(latestBundle.getStartTime()), latestBundle.getEndTime());
 
             //pick effectiveTime from already created bundle
             effectiveTime = getMinStartTime(latestBundle);
-            LOG.info("Will set old coord end time to " + SchemaHelper.formatDateUTC(effectiveTime));
+            LOG.info("Will set old coord end time to {}", SchemaHelper.formatDateUTC(effectiveTime));
         }
         if (effectiveTime != null) {
             //set endtime for old coords
@@ -1072,11 +1074,11 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         EntityUtil.setStartDate(entity, cluster, startDate);
         WorkflowBuilder<Entity> builder = WorkflowBuilder.getBuilder(ENGINE, clone);
         Map<String, Properties> bundleProps = builder.newWorkflowSchedule(cluster);
-        LOG.info("Scheduling " + entity.toShortString() + " on cluster " + cluster + " with props " + bundleProps);
+        LOG.info("Scheduling {} on cluster {} with props {}", entity.toShortString(), cluster, bundleProps);
         if (bundleProps != null && bundleProps.size() > 0) {
             return scheduleEntity(cluster, bundleProps.get(cluster), entity);
         } else {
-            LOG.info("No new workflow to be scheduled for this " + entity.toShortString());
+            LOG.info("No new workflow to be scheduled for this {}", entity.toShortString());
             return null;
         }
     }
@@ -1133,7 +1135,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
             jobprops.remove(OozieClient.BUNDLE_APP_PATH);
             client.reRun(jobId, jobprops);
             assertStatus(cluster, jobId, Job.Status.RUNNING);
-            LOG.info("Rerun job " + jobId + " on cluster " + cluster);
+            LOG.info("Rerun job {} on cluster {}", jobId, cluster);
         } catch (Exception e) {
             LOG.error("Unable to rerun workflows", e);
             throw new FalconException(e);
@@ -1204,7 +1206,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     private String run(String cluster, Properties props) throws FalconException {
         try {
             String jobId = OozieClientFactory.get(cluster).run(props);
-            LOG.info("Submitted " + jobId + " on cluster " + cluster + " with properties : " + props);
+            LOG.info("Submitted {} on cluster {} with properties: {}", jobId, cluster, props);
             return jobId;
         } catch (OozieClientException e) {
             LOG.error("Unable to schedule workflows", e);
@@ -1217,7 +1219,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
             OozieClientFactory.get(cluster).suspend(jobId);
             assertStatus(cluster, jobId, Status.PREPSUSPENDED, Status.SUSPENDED, Status.SUCCEEDED, Status.FAILED,
                 Status.KILLED);
-            LOG.info("Suspended job " + jobId + " on cluster " + cluster);
+            LOG.info("Suspended job {} on cluster {}", jobId, cluster);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -1227,7 +1229,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         try {
             OozieClientFactory.get(cluster).resume(jobId);
             assertStatus(cluster, jobId, Status.PREP, Status.RUNNING, Status.SUCCEEDED, Status.FAILED, Status.KILLED);
-            LOG.info("Resumed job " + jobId + " on cluster " + cluster);
+            LOG.info("Resumed job {} on cluster {}", jobId, cluster);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -1237,7 +1239,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         try {
             OozieClientFactory.get(cluster).kill(jobId);
             assertStatus(cluster, jobId, Status.KILLED, Status.SUCCEEDED, Status.FAILED);
-            LOG.info("Killed job " + jobId + " on cluster " + cluster);
+            LOG.info("Killed job {} on cluster {}", jobId, cluster);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -1246,7 +1248,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     private void change(String cluster, String jobId, String changeValue) throws FalconException {
         try {
             OozieClientFactory.get(cluster).change(jobId, changeValue);
-            LOG.info("Changed bundle/coord " + jobId + ": " + changeValue + " on cluster " + cluster);
+            LOG.info("Changed bundle/coord {}: {} on cluster {}", jobId, changeValue, cluster);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -1289,9 +1291,9 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                 }
                 coord = client.getCoordJobInfo(id);
             }
-            LOG.error("Failed to change coordinator. Current value " + coord.getConcurrency() + ", "
-                + SchemaHelper.formatDateUTC(coord.getEndTime()) + ", " + SchemaHelper.formatDateUTC(
-                    coord.getPauseTime()));
+            LOG.error("Failed to change coordinator. Current value {}, {}, {}",
+                    coord.getConcurrency(), SchemaHelper.formatDateUTC(coord.getEndTime()),
+                    SchemaHelper.formatDateUTC(coord.getPauseTime()));
             throw new FalconException("Failed to change coordinator " + id + " with change value " + changeValueStr);
         } catch (OozieClientException e) {
             throw new FalconException(e);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java b/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java
index c78a83a..3d0e903 100644
--- a/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java
+++ b/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java
@@ -24,10 +24,11 @@ import org.apache.falcon.util.RuntimeProperties;
 import org.apache.hadoop.hdfs.web.KerberosUgiAuthenticator;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.Authenticator;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.rest.RestConstants;
 import org.json.simple.JSONObject;
 import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.io.InputStreamReader;
@@ -47,7 +48,7 @@ import java.util.concurrent.Callable;
  */
 public class ProxyOozieClient extends AuthOozieClient {
 
-    private static final Logger LOG = Logger.getLogger(ProxyOozieClient.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ProxyOozieClient.class);
     private static final Map<String, String> NONE = new HashMap<String, String>();
 
     private final Authenticator authenticator = new KerberosUgiAuthenticator();
@@ -78,9 +79,7 @@ public class ProxyOozieClient extends AuthOozieClient {
         throws IOException, OozieClientException {
 
         final URL decoratedUrl = decorateUrlWithUser(url);
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("ProxyOozieClient.createConnection: u=" + url + ", m=" + method);
-        }
+        LOG.debug("ProxyOozieClient.createConnection: u={}, m={}", url, method);
 
         UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
         try {
@@ -115,9 +114,7 @@ public class ProxyOozieClient extends AuthOozieClient {
             // strUrl += "&" + RestConstants.DO_AS_PARAM + "=" + CurrentUser.getUser();
 
             url = new URL(strUrl);
-            if (LOG.isDebugEnabled()) {
-                LOG.debug("Decorated url with user info: " + url);
-            }
+            LOG.debug("Decorated url with user info: {}", url);
         }
 
         return url;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java
----------------------------------------------------------------------
diff --git a/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java b/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java
index 5a2e570..881b0c0 100644
--- a/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java
+++ b/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java
@@ -26,18 +26,18 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.DefaultJobHistoryParser;
 import org.apache.hadoop.mapred.JobHistory;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.net.URLDecoder;
 
 /**
  * Hadoop v1 task log retriever based on job history
  */
 public final class TaskLogRetrieverV1 extends DefaultTaskLogRetriever {
-    private static final Logger LOG = Logger.getLogger(TaskLogRetrieverV1.class);
+    private static final Logger LOG = LoggerFactory.getLogger(TaskLogRetrieverV1.class);
 
     @Override
     public String getFromHistory(String jobId) throws IOException {
@@ -46,8 +46,8 @@ public final class TaskLogRetrieverV1 extends DefaultTaskLogRetriever {
         if (file == null) return null;
         JobHistory.JobInfo jobInfo = new JobHistory.JobInfo(jobId);
         DefaultJobHistoryParser.parseJobTasks(file, jobInfo, new Path(file).getFileSystem(conf));
-        LOG.info("History file:" + file);
-        LOG.debug("Number of tasks in the history file: " + jobInfo.getAllTasks().size());
+        LOG.info("History file: {}", file);
+        LOG.debug("Number of tasks in the history file: {}", jobInfo.getAllTasks().size());
         for (JobHistory.Task task : jobInfo.getAllTasks().values()) {
             if (task.get(JobHistory.Keys.TASK_TYPE).equals(JobHistory.Values.MAP.name()) &&
                     task.get(JobHistory.Keys.TASK_STATUS).equals(JobHistory.Values.SUCCESS.name())) {
@@ -80,7 +80,7 @@ public final class TaskLogRetrieverV1 extends DefaultTaskLogRetriever {
                 file = file.substring(file.lastIndexOf('=') + 1);
                 file = JobHistory.JobInfo.decodeJobHistoryFileName(file);
             } else {
-                LOG.warn("JobURL " + jobUrl + " for id: " + jobId + " returned " + status);
+                LOG.warn("JobURL {} for id: {} returned {}", jobUrl, jobId, status);
             }
             return file;
         } catch (URISyntaxException e) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5a5716e..3550b7b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -102,7 +102,7 @@
 
         <include.prism>true</include.prism>
 
-        <slf4j.version>1.6.1</slf4j.version>
+        <slf4j.version>1.7.5</slf4j.version>
         <oozie.version>4.0.1</oozie.version>
         <oozie.buildversion>${oozie.version}-falcon</oozie.buildversion>
         <oozie.forcebuild>false</oozie.forcebuild>
@@ -511,12 +511,6 @@
             </dependency>
 
             <dependency>
-                <groupId>org.slf4j</groupId>
-                <artifactId>slf4j-simple</artifactId>
-                <version>${slf4j.version}</version>
-            </dependency>
-
-            <dependency>
                 <groupId>commons-lang</groupId>
                 <artifactId>commons-lang</artifactId>
                 <version>2.6</version>
@@ -525,7 +519,7 @@
             <dependency>
                 <groupId>log4j</groupId>
                 <artifactId>log4j</artifactId>
-                <version>1.2.15</version>
+                <version>1.2.17</version>
                 <scope>compile</scope>
                 <exclusions>
                     <exclusion>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/FalconWebException.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/FalconWebException.java b/prism/src/main/java/org/apache/falcon/FalconWebException.java
index 65cf7d2..d552c07 100644
--- a/prism/src/main/java/org/apache/falcon/FalconWebException.java
+++ b/prism/src/main/java/org/apache/falcon/FalconWebException.java
@@ -22,7 +22,8 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.falcon.resource.APIResult;
 import org.apache.falcon.resource.InstancesResult;
 import org.apache.falcon.resource.InstancesSummaryResult;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.MediaType;
@@ -33,7 +34,7 @@ import javax.ws.rs.core.Response;
  */
 public class FalconWebException extends WebApplicationException {
 
-    private static final Logger LOG = Logger.getLogger(FalconWebException.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FalconWebException.class);
 
     public static FalconWebException newException(Throwable e,
                                                   Response.Status status) {
@@ -49,28 +50,28 @@ public class FalconWebException extends WebApplicationException {
     public static FalconWebException newInstanceSummaryException(Throwable e, Response.Status status) {
         LOG.error("Failure reason", e);
         String message = e.getMessage() + "\n" + getAddnInfo(e);
-        LOG.error("Action failed: " + status + "\nError:" + message);
+        LOG.error("Action failed: {}\nError: {}", status, message);
         APIResult result = new InstancesSummaryResult(APIResult.Status.FAILED, message);
         return new FalconWebException(Response.status(status).entity(result).type(MediaType.TEXT_XML_TYPE).build());
     }
 
     public static FalconWebException newException(APIResult result,
                                                   Response.Status status) {
-        LOG.error("Action failed: " + status + "\nError:" + result.getMessage());
+        LOG.error("Action failed: {}\nError: {}", status, result.getMessage());
         return new FalconWebException(Response.status(status).
                 entity(result).type(MediaType.TEXT_XML_TYPE).build());
     }
 
     public static FalconWebException newException(String message,
                                                   Response.Status status) {
-        LOG.error("Action failed: " + status + "\nError:" + message);
+        LOG.error("Action failed: {}\nError: {}", status, message);
         APIResult result = new APIResult(APIResult.Status.FAILED, message);
         return new FalconWebException(Response.status(status).
                 entity(result).type(MediaType.TEXT_XML_TYPE).build());
     }
 
     public static FalconWebException newInstanceException(String message, Response.Status status) {
-        LOG.error("Action failed: " + status + "\nError:" + message);
+        LOG.error("Action failed: {}\nError: {}", status, message);
         APIResult result = new InstancesResult(APIResult.Status.FAILED, message);
         return new FalconWebException(Response.status(status).entity(result).type(MediaType.TEXT_XML_TYPE).build());
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/Main.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/Main.java b/prism/src/main/java/org/apache/falcon/Main.java
index a425a72..28a3c06 100644
--- a/prism/src/main/java/org/apache/falcon/Main.java
+++ b/prism/src/main/java/org/apache/falcon/Main.java
@@ -26,14 +26,15 @@ import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.falcon.util.BuildProperties;
 import org.apache.falcon.util.EmbeddedServer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
 
 /**
  * Driver for running Falcon as a standalone server with embedded jetty server.
  */
 public final class Main {
-    private static final Logger LOG = Logger.getLogger(Main.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Main.class);
     private static final String APP_PATH = "app";
     private static final String APP_PORT = "port";
 
@@ -76,7 +77,7 @@ public final class Main {
         if (startActiveMq) {
             String dataDir = System.getProperty("falcon.embeddedmq.data", "target/");
             int mqport = Integer.valueOf(System.getProperty("falcon.embeddedmq.port", "61616"));
-            LOG.info("Starting activemq at port " + mqport + " with data dir " + dataDir);
+            LOG.info("Starting ActiveMQ at port {} with data dir {}", mqport, dataDir);
 
             BrokerService broker = new BrokerService();
             broker.setUseJmx(false);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/listener/ContextStartupListener.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/listener/ContextStartupListener.java b/prism/src/main/java/org/apache/falcon/listener/ContextStartupListener.java
index f22d831..5e879b1 100644
--- a/prism/src/main/java/org/apache/falcon/listener/ContextStartupListener.java
+++ b/prism/src/main/java/org/apache/falcon/listener/ContextStartupListener.java
@@ -24,7 +24,8 @@ import org.apache.falcon.service.ServiceInitializer;
 import org.apache.falcon.util.BuildProperties;
 import org.apache.falcon.util.RuntimeProperties;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.ServletContextEvent;
 import javax.servlet.ServletContextListener;
@@ -36,7 +37,7 @@ import java.util.Properties;
  */
 public class ContextStartupListener implements ServletContextListener {
 
-    private static final Logger LOG = Logger.getLogger(ContextStartupListener.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ContextStartupListener.class);
 
     private final ServiceInitializer startupServices = new ServiceInitializer();
 
@@ -76,7 +77,7 @@ public class ContextStartupListener implements ServletContextListener {
         }
         buffer.append("\n############################################");
         buffer.append("############################################");
-        LOG.info(buffer);
+        LOG.info(buffer.toString());
     }
 
     @Override
@@ -90,6 +91,6 @@ public class ContextStartupListener implements ServletContextListener {
         buffer.append("\n############################################");
         buffer.append("\n         Falcon Server (SHUTDOWN)            ");
         buffer.append("\n############################################");
-        LOG.info(buffer);
+        LOG.info(buffer.toString());
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/plugin/ChainableMonitoringPlugin.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/plugin/ChainableMonitoringPlugin.java b/prism/src/main/java/org/apache/falcon/plugin/ChainableMonitoringPlugin.java
index 72e0302..c695bb7 100644
--- a/prism/src/main/java/org/apache/falcon/plugin/ChainableMonitoringPlugin.java
+++ b/prism/src/main/java/org/apache/falcon/plugin/ChainableMonitoringPlugin.java
@@ -23,8 +23,9 @@ import org.apache.falcon.aspect.AbstractFalconAspect;
 import org.apache.falcon.aspect.ResourceMessage;
 import org.apache.falcon.util.ReflectionUtils;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
 import org.aspectj.lang.annotation.Aspect;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -36,7 +37,7 @@ import java.util.List;
  */
 @Aspect
 public class ChainableMonitoringPlugin extends AbstractFalconAspect implements MonitoringPlugin {
-    private static final Logger LOG = Logger.getLogger(ChainableMonitoringPlugin.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ChainableMonitoringPlugin.class);
 
     private List<MonitoringPlugin> plugins = new ArrayList<MonitoringPlugin>();
 
@@ -47,11 +48,11 @@ public class ChainableMonitoringPlugin extends AbstractFalconAspect implements M
             for (String pluginClass : pluginClasses.split(",")) {
                 MonitoringPlugin plugin = ReflectionUtils.getInstanceByClassName(pluginClass.trim());
                 plugins.add(plugin);
-                LOG.info("Registered Monitoring Plugin " + pluginClass);
+                LOG.info("Registered Monitoring Plugin {}", pluginClass);
             }
         } catch (FalconException e) {
             plugins = Arrays.asList((MonitoringPlugin) new LoggingPlugin());
-            LOG.error("Unable to initialize monitoring plugins: " + pluginClasses, e);
+            LOG.error("Unable to initialize monitoring plugins: {}", pluginClasses, e);
         }
     }
 
@@ -61,7 +62,7 @@ public class ChainableMonitoringPlugin extends AbstractFalconAspect implements M
             try {
                 plugin.monitor(message);
             } catch (Exception e) {
-                LOG.debug("Unable to publish message to " + plugin.getClass(), e);
+                LOG.debug("Unable to publish message to {}", plugin.getClass(), e);
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java b/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java
index e6f0709..c135470 100644
--- a/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java
+++ b/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java
@@ -40,8 +40,9 @@ import org.apache.falcon.util.RuntimeProperties;
 import org.apache.falcon.workflow.WorkflowEngineFactory;
 import org.apache.falcon.workflow.engine.AbstractWorkflowEngine;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.log4j.Logger;
 import org.datanucleus.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.ws.rs.core.Response;
@@ -53,8 +54,8 @@ import java.util.*;
  * A base class for managing Entity operations.
  */
 public abstract class AbstractEntityManager {
-    private static final Logger LOG = Logger.getLogger(AbstractEntityManager.class);
-    private static final Logger AUDIT = Logger.getLogger("AUDIT");
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractEntityManager.class);
+    private static final Logger AUDIT = LoggerFactory.getLogger("AUDIT");
     protected static final int XML_DEBUG_LEN = 10 * 1024;
 
     private AbstractWorkflowEngine workflowEngine;
@@ -173,7 +174,7 @@ public abstract class AbstractEntityManager {
             return new APIResult(APIResult.Status.SUCCEEDED,
                     "Validated successfully (" + entityType + ") " + entity.getName());
         } catch (Throwable e) {
-            LOG.error("Validation failed for entity (" + type + ") ", e);
+            LOG.error("Validation failed for entity ({})", type, e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
         }
     }
@@ -210,7 +211,7 @@ public abstract class AbstractEntityManager {
             return new APIResult(APIResult.Status.SUCCEEDED,
                     entity + "(" + type + ") removed successfully " + removedFromEngine);
         } catch (Throwable e) {
-            LOG.error("Unable to reach workflow engine for deletion or " + "deletion failed", e);
+            LOG.error("Unable to reach workflow engine for deletion or deletion failed", e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
         }
     }
@@ -257,7 +258,7 @@ public abstract class AbstractEntityManager {
             return new APIResult(APIResult.Status.SUCCEEDED, entityName + " updated successfully"
                     + (effectiveTimes.isEmpty() ? "" : " with effect from " + effectiveTimes));
         } catch (Throwable e) {
-            LOG.error("Updation failed", e);
+            LOG.error("Update failed", e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
         } finally {
             ConfigurationStore.get().cleanupUpdateInit();
@@ -330,7 +331,7 @@ public abstract class AbstractEntityManager {
 
         validate(entity);
         configStore.publish(entityType, entity);
-        LOG.info("Submit successful: (" + type + ")" + entity.getName());
+        LOG.info("Submit successful: ({}): {}", type, entity.getName());
         return entity;
     }
 
@@ -349,7 +350,7 @@ public abstract class AbstractEntityManager {
                 try {
                     xmlStream.reset();
                     String xmlData = getAsString(xmlStream);
-                    LOG.debug("XML DUMP for (" + entityType + "): " + xmlData, e);
+                    LOG.debug("XML DUMP for ({}): {}", entityType, xmlData, e);
                 } catch (IOException ignore) {
                     // ignore
                 }
@@ -374,8 +375,8 @@ public abstract class AbstractEntityManager {
         if (request == null) {
             return; // this must be internal call from Falcon
         }
-        AUDIT.info("Performed " + action + " on " + entity + "(" + type + ") :: " + request.getRemoteHost() + "/"
-                + CurrentUser.getUser());
+        AUDIT.info("Performed {} on {} ({}) :: {}/{}",
+                action, entity, type, request.getRemoteHost(), CurrentUser.getUser());
     }
 
     private enum EntityStatus {
@@ -402,7 +403,7 @@ public abstract class AbstractEntityManager {
             throw e;
         } catch (Exception e) {
 
-            LOG.error("Unable to get status for entity " + entity + "(" + type + ")", e);
+            LOG.error("Unable to get status for entity {} ({})", entity, type, e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
         }
     }
@@ -441,7 +442,7 @@ public abstract class AbstractEntityManager {
             Entity[] dependentEntities = dependents.toArray(new Entity[dependents.size()]);
             return new EntityList(dependentEntities, entityObj);
         } catch (Exception e) {
-            LOG.error("Unable to get dependencies for entityName " + entityName + "(" + type + ")", e);
+            LOG.error("Unable to get dependencies for entityName {} ({})", entityName, type, e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
         }
     }
@@ -492,7 +493,7 @@ public abstract class AbstractEntityManager {
             }
             return new EntityList(elements);
         } catch (Exception e) {
-            LOG.error("Unable to get list for entities for (" + type + ")", e);
+            LOG.error("Unable to get list for entities for ({})", type, e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
         }
     }
@@ -513,7 +514,7 @@ public abstract class AbstractEntityManager {
             }
             return entity.toString();
         } catch (Throwable e) {
-            LOG.error("Unable to get entity definition from config " + "store for (" + type + ") " + entityName, e);
+            LOG.error("Unable to get entity definition from config store for ({}): {}", type, entityName, e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
 
         }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/resource/AbstractInstanceManager.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/AbstractInstanceManager.java b/prism/src/main/java/org/apache/falcon/resource/AbstractInstanceManager.java
index 07ab2bc..0bdf94b 100644
--- a/prism/src/main/java/org/apache/falcon/resource/AbstractInstanceManager.java
+++ b/prism/src/main/java/org/apache/falcon/resource/AbstractInstanceManager.java
@@ -30,7 +30,8 @@ import org.apache.falcon.entity.v0.SchemaHelper;
 import org.apache.falcon.logging.LogProvider;
 import org.apache.falcon.resource.InstancesResult.Instance;
 import org.apache.falcon.workflow.engine.AbstractWorkflowEngine;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.ServletInputStream;
 import javax.servlet.http.HttpServletRequest;
@@ -44,7 +45,7 @@ import java.util.Set;
  * A base class for managing Entity's Instance operations.
  */
 public abstract class AbstractInstanceManager extends AbstractEntityManager {
-    private static final Logger LOG = Logger.getLogger(AbstractInstanceManager.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractInstanceManager.class);
 
     protected void checkType(String type) {
         if (StringUtils.isEmpty(type)) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/resource/AbstractSchedulableEntityManager.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/AbstractSchedulableEntityManager.java b/prism/src/main/java/org/apache/falcon/resource/AbstractSchedulableEntityManager.java
index cc4446c..f98aece 100644
--- a/prism/src/main/java/org/apache/falcon/resource/AbstractSchedulableEntityManager.java
+++ b/prism/src/main/java/org/apache/falcon/resource/AbstractSchedulableEntityManager.java
@@ -25,7 +25,8 @@ import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.UnschedulableEntityException;
 import org.apache.falcon.monitors.Dimension;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.ws.rs.PathParam;
@@ -38,7 +39,7 @@ import javax.ws.rs.core.Response;
  */
 public abstract class AbstractSchedulableEntityManager extends AbstractEntityManager {
 
-    private static final Logger LOG = Logger.getLogger(AbstractSchedulableEntityManager.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractSchedulableEntityManager.class);
 
     /**
      * Schedules an submitted entity immediately.

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java b/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java
index f2b8874..20400fc 100644
--- a/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java
+++ b/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java
@@ -26,7 +26,8 @@ import org.apache.falcon.resource.proxy.BufferedRequest;
 import org.apache.falcon.security.CurrentUser;
 import org.apache.falcon.util.DeploymentProperties;
 import org.apache.falcon.util.RuntimeProperties;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.ws.rs.Consumes;
@@ -49,7 +50,7 @@ import java.util.Properties;
  * A Channel implementation for HTTP.
  */
 public class HTTPChannel extends AbstractChannel {
-    private static final Logger LOG = Logger.getLogger(HTTPChannel.class);
+    private static final Logger LOG = LoggerFactory.getLogger(HTTPChannel.class);
 
     private static final HttpServletRequest DEFAULT_NULL_REQUEST = new NullServletRequest();
 
@@ -66,7 +67,7 @@ public class HTTPChannel extends AbstractChannel {
         try {
             String proxyClassName = DEPLOYMENT_PROPERTIES.getProperty(serviceName + ".proxy");
             service = Class.forName(proxyClassName);
-            LOG.info("Service: " + serviceName + ", url = " + urlPrefix);
+            LOG.info("Service: {}, url = {}", serviceName, urlPrefix);
         } catch (Exception e) {
             throw new FalconException("Unable to initialize channel for " + serviceName, e);
         }
@@ -78,7 +79,7 @@ public class HTTPChannel extends AbstractChannel {
         try {
             Method method = getMethod(service, methodName, args);
             String url = urlPrefix + "/" + pathValue(method, args);
-            LOG.debug("Executing " + url);
+            LOG.debug("Executing {}", url);
 
             HttpServletRequest incomingRequest = getIncomingRequest(args);
             incomingRequest.getInputStream().reset();
@@ -100,10 +101,10 @@ public class HTTPChannel extends AbstractChannel {
                 return (T) response.getEntity(method.getReturnType());
             } else if (response.getClientResponseStatus().getStatusCode()
                     == Response.Status.BAD_REQUEST.getStatusCode()) {
-                LOG.error("Request failed: " + response.getClientResponseStatus().getStatusCode());
+                LOG.error("Request failed: {}", response.getClientResponseStatus().getStatusCode());
                 return (T) response.getEntity(method.getReturnType());
             } else {
-                LOG.error("Request failed: " + response.getClientResponseStatus().getStatusCode());
+                LOG.error("Request failed: {}", response.getClientResponseStatus().getStatusCode());
                 throw new FalconException(response.getEntity(String.class));
             }
         } catch (Throwable e) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/resource/channel/IPCChannel.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/channel/IPCChannel.java b/prism/src/main/java/org/apache/falcon/resource/channel/IPCChannel.java
index bbdaf40..d046ad4 100644
--- a/prism/src/main/java/org/apache/falcon/resource/channel/IPCChannel.java
+++ b/prism/src/main/java/org/apache/falcon/resource/channel/IPCChannel.java
@@ -23,7 +23,8 @@ import org.apache.falcon.FalconRuntimException;
 import org.apache.falcon.FalconWebException;
 import org.apache.falcon.resource.AbstractEntityManager;
 import org.apache.falcon.util.ReflectionUtils;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.lang.reflect.Method;
 
@@ -31,7 +32,7 @@ import java.lang.reflect.Method;
  * Inter-process implementation of a Channel.
  */
 public class IPCChannel extends AbstractChannel {
-    private static final Logger LOG = Logger.getLogger(IPCChannel.class);
+    private static final Logger LOG = LoggerFactory.getLogger(IPCChannel.class);
     private AbstractEntityManager service;
 
     public void init(String ignoreColo, String serviceName) throws FalconException {
@@ -41,7 +42,7 @@ public class IPCChannel extends AbstractChannel {
     @SuppressWarnings("unchecked")
     @Override
     public <T> T invoke(String methodName, Object... args) throws FalconException {
-        LOG.debug("Invoking method " + methodName + " on service " + service.getClass().getName());
+        LOG.debug("Invoking method {} on service {}", methodName, service.getClass().getName());
         Method method = getMethod(service.getClass(), methodName, args);
 
         try {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/resource/metadata/LineageMetadataResource.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/metadata/LineageMetadataResource.java b/prism/src/main/java/org/apache/falcon/resource/metadata/LineageMetadataResource.java
index a1aeca5..93b4c04 100644
--- a/prism/src/main/java/org/apache/falcon/resource/metadata/LineageMetadataResource.java
+++ b/prism/src/main/java/org/apache/falcon/resource/metadata/LineageMetadataResource.java
@@ -33,10 +33,11 @@ import org.apache.falcon.metadata.RelationshipProperty;
 import org.apache.falcon.metadata.RelationshipType;
 import org.apache.falcon.service.Services;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
@@ -59,7 +60,7 @@ import java.util.Set;
 @Path("graphs/lineage")
 public class LineageMetadataResource {
 
-    private static final Logger LOG = Logger.getLogger(LineageMetadataResource.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LineageMetadataResource.class);
 
     public static final String RESULTS = "results";
     public static final String TOTAL_SIZE = "totalSize";
@@ -99,7 +100,7 @@ public class LineageMetadataResource {
         checkIfMetadataMappingServiceIsEnabled();
         String file = StartupProperties.get().getProperty("falcon.graph.serialize.path")
                 + "/lineage-graph-" + System.currentTimeMillis() + ".json";
-        LOG.info("Serialize Graph to: " + file);
+        LOG.info("Serialize Graph to: {}", file);
         try {
             GraphUtils.dump(getGraph(), file);
             return Response.ok().build();
@@ -141,7 +142,7 @@ public class LineageMetadataResource {
     @Produces({MediaType.APPLICATION_JSON})
     public Response getVertex(@PathParam("id") final String vertexId) {
         checkIfMetadataMappingServiceIsEnabled();
-        LOG.info("Get vertex for vertexId= " + vertexId);
+        LOG.info("Get vertex for vertexId= {}", vertexId);
         try {
             Vertex vertex = findVertex(vertexId);
 
@@ -180,7 +181,7 @@ public class LineageMetadataResource {
                                         @DefaultValue("false") @QueryParam("relationships")
                                         final String relationships) {
         checkIfMetadataMappingServiceIsEnabled();
-        LOG.info("Get vertex for vertexId= " + vertexId);
+        LOG.info("Get vertex for vertexId= {}", vertexId);
         try {
             Vertex vertex = findVertex(vertexId);
 
@@ -279,7 +280,7 @@ public class LineageMetadataResource {
     public Response getVertices(@QueryParam("key") final String key,
                                 @QueryParam("value") final String value) {
         checkIfMetadataMappingServiceIsEnabled();
-        LOG.info("Get vertices for property key= " + key + ", value= " + value);
+        LOG.info("Get vertices for property key= {}, value= {}", key, value);
         try {
             JSONObject response = buildJSONResponse(getGraph().getVertices(key, value));
             return Response.ok(response).build();
@@ -303,7 +304,7 @@ public class LineageMetadataResource {
     public Response getVertexEdges(@PathParam("id") String vertexId,
                                    @PathParam("direction") String direction) {
         checkIfMetadataMappingServiceIsEnabled();
-        LOG.info("Get vertex edges for vertexId= " + vertexId + ", direction= " + direction);
+        LOG.info("Get vertex edges for vertexId= {}, direction= {}", vertexId, direction);
         try {
             Vertex vertex = findVertex(vertexId);
 
@@ -392,7 +393,7 @@ public class LineageMetadataResource {
     @Produces({MediaType.APPLICATION_JSON})
     public Response getEdge(@PathParam("id") final String edgeId) {
         checkIfMetadataMappingServiceIsEnabled();
-        LOG.info("Get vertex for edgeId= " + edgeId);
+        LOG.info("Get edge for edgeId= {}", edgeId);
         try {
             Edge edge = getGraph().getEdge(edgeId);
             if (edge == null) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java b/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java
index 52ede1d..5a56b9a 100644
--- a/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java
+++ b/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java
@@ -23,8 +23,9 @@ import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
-import org.apache.log4j.Logger;
 import org.apache.log4j.NDC;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
@@ -49,7 +50,7 @@ import java.util.UUID;
  */
 public class BasicAuthFilter extends AuthenticationFilter {
 
-    private static final Logger LOG = Logger.getLogger(BasicAuthFilter.class);
+    private static final Logger LOG = LoggerFactory.getLogger(BasicAuthFilter.class);
 
     /**
      * Constant for the configuration property that indicates the prefix.
@@ -178,7 +179,7 @@ public class BasicAuthFilter extends AuthenticationFilter {
                             NDC.push(user + ":" + httpRequest.getMethod() + "/" + httpRequest.getPathInfo());
                             NDC.push(requestId);
                             CurrentUser.authenticate(user);
-                            LOG.info("Request from user: " + user + ", URL=" + getRequestUrl(httpRequest));
+                            LOG.info("Request from user: {}, URL={}", user, getRequestUrl(httpRequest));
 
                             filterChain.doFilter(servletRequest, servletResponse);
                         } finally {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java b/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java
index 537a999..35e0ba3 100644
--- a/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java
+++ b/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java
@@ -31,7 +31,8 @@ import org.apache.falcon.resource.InstancesResult;
 import org.apache.falcon.security.CurrentUser;
 import org.apache.falcon.workflow.WorkflowEngineFactory;
 import org.apache.falcon.workflow.engine.AbstractWorkflowEngine;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.jms.*;
 import java.lang.reflect.InvocationTargetException;
@@ -41,7 +42,7 @@ import java.util.Date;
  * Subscribes to the falcon topic for handling retries and alerts.
  */
 public class FalconTopicSubscriber implements MessageListener, ExceptionListener {
-    private static final Logger LOG = Logger.getLogger(FalconTopicSubscriber.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FalconTopicSubscriber.class);
 
     private final String implementation;
     private final String userName;
@@ -75,7 +76,7 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
             connection.setExceptionListener(this);
             connection.start();
         } catch (Exception e) {
-            LOG.error("Error starting subscriber of topic: " + this.toString(), e);
+            LOG.error("Error starting subscriber of topic: {}", this, e);
             throw new FalconException(e);
         }
     }
@@ -119,8 +120,8 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
                         nominalTime, runId, workflowId, workflowUser,
                         System.currentTimeMillis());
                 } else {
-                    LOG.info("Late data handling not applicable for entityType: " + entityType + ", entityName: "
-                        + entityName + " operation: " + operation);
+                    LOG.info("Late data handling not applicable for entityType: {}, entityName: {} operation: {}",
+                            entityType, entityName, operation);
                 }
                 GenericAlert.instrumentSucceededInstance(cluster, entityType,
                     entityName, nominalTime, workflowId, workflowUser, runId, operation,
@@ -129,11 +130,11 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
                 notifyMetadataMappingService(entityName, operation, mapMessage.getString(ARG.logDir.getArgName()));
             }
         } catch (JMSException e) {
-            LOG.info("Error in onMessage for subscriber of topic: " + this.toString(), e);
+            LOG.info("Error in onMessage for subscriber of topic: {}", this, e);
         } catch (FalconException e) {
-            LOG.info("Error in onMessage for subscriber of topic: " + this.toString(), e);
+            LOG.info("Error in onMessage for subscriber of topic: {}", this, e);
         } catch (Exception e) {
-            LOG.info("Error in onMessage for subscriber of topic: " + this.toString(), e);
+            LOG.info("Error in onMessage for subscriber of topic: {}", this, e);
         }
     }
 
@@ -153,17 +154,17 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
                 .append(mapMessage.getString(arg.getArgName())).append(", ");
         }
         buff.append("}");
-        LOG.debug(buff);
+        LOG.debug(buff.toString());
     }
 
     @Override
     public void onException(JMSException ignore) {
-        LOG.info("Error in onException for subscriber of topic: " + this.toString(), ignore);
+        LOG.info("Error in onException for subscriber of topic: {}", this, ignore);
     }
 
     public void closeSubscriber() throws FalconException {
         try {
-            LOG.info("Closing subscriber on topic : " + this.topicName);
+            LOG.info("Closing subscriber on topic: {}", this.topicName);
             if (subscriber != null) {
                 subscriber.close();
             }
@@ -171,7 +172,7 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
                 connection.close();
             }
         } catch (JMSException e) {
-            LOG.error("Error closing subscriber of topic: " + this.toString(), e);
+            LOG.error("Error closing subscriber of topic: {}", this, e);
             throw new FalconException(e);
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/test/java/org/apache/falcon/aspect/LoggingAspectTest.java
----------------------------------------------------------------------
diff --git a/prism/src/test/java/org/apache/falcon/aspect/LoggingAspectTest.java b/prism/src/test/java/org/apache/falcon/aspect/LoggingAspectTest.java
index 547cb6d..ee60f4f 100644
--- a/prism/src/test/java/org/apache/falcon/aspect/LoggingAspectTest.java
+++ b/prism/src/test/java/org/apache/falcon/aspect/LoggingAspectTest.java
@@ -49,7 +49,7 @@ public class LoggingAspectTest {
         } catch (Exception e) {
             return;
         }
-        Assert.fail("Exepected excpetion");
+        Assert.fail("Expected exception");
     }
 
     @Test(expectedExceptions = FalconWebException.class)

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/process/src/main/java/org/apache/falcon/workflow/OozieProcessWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/process/src/main/java/org/apache/falcon/workflow/OozieProcessWorkflowBuilder.java b/process/src/main/java/org/apache/falcon/workflow/OozieProcessWorkflowBuilder.java
index edfe5a8..3d6bf7b 100644
--- a/process/src/main/java/org/apache/falcon/workflow/OozieProcessWorkflowBuilder.java
+++ b/process/src/main/java/org/apache/falcon/workflow/OozieProcessWorkflowBuilder.java
@@ -65,9 +65,10 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.CoordinatorJob.Timeunit;
 import org.apache.oozie.client.OozieClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.xml.bind.JAXBElement;
 import java.io.IOException;
@@ -86,7 +87,7 @@ import java.util.Set;
  * Oozie workflow builder for falcon entities.
  */
 public class OozieProcessWorkflowBuilder extends OozieWorkflowBuilder<Process> {
-    private static final Logger LOG = Logger.getLogger(OozieProcessWorkflowBuilder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(OozieProcessWorkflowBuilder.class);
 
     private static final Set<String> FALCON_PROCESS_HIVE_ACTIONS = new HashSet<String>(
             Arrays.asList(new String[]{"recordsize", "user-oozie-workflow", "user-pig-job", "user-hive-job", }));
@@ -102,7 +103,7 @@ public class OozieProcessWorkflowBuilder extends OozieWorkflowBuilder<Process> {
         for (String clusterName : clusters) {
             org.apache.falcon.entity.v0.process.Cluster processCluster = ProcessHelper.getCluster(entity, clusterName);
             if (processCluster.getValidity().getStart().compareTo(processCluster.getValidity().getEnd()) >= 0) {
-                LOG.info("process validity start <= end for cluster " + clusterName + ". Skipping schedule");
+                LOG.info("process validity start >= end for cluster {}. Skipping schedule", clusterName);
                 break;
             }
 


Mime
View raw message