You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@falcon.apache.org by sh...@apache.org on 2014/05/21 09:21:01 UTC

[1/4] FALCON-133 Upgrade to slf4j 1.7.5 and use SLF4J logger. Contributed by Jean-Baptiste Onofré

Repository: incubator-falcon
Updated Branches:
  refs/heads/master 97f89a1e6 -> 5afbf3585


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/replication/src/main/java/org/apache/falcon/replication/CustomReplicator.java
----------------------------------------------------------------------
diff --git a/replication/src/main/java/org/apache/falcon/replication/CustomReplicator.java b/replication/src/main/java/org/apache/falcon/replication/CustomReplicator.java
index 0074c1a..164bfb0 100644
--- a/replication/src/main/java/org/apache/falcon/replication/CustomReplicator.java
+++ b/replication/src/main/java/org/apache/falcon/replication/CustomReplicator.java
@@ -23,7 +23,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.tools.DistCp;
 import org.apache.hadoop.tools.DistCpOptions;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 
@@ -34,7 +35,7 @@ import java.io.IOException;
  */
 public class CustomReplicator extends DistCp {
 
-    private static final Logger LOG = Logger.getLogger(CustomReplicator.class);
+    private static final Logger LOG = LoggerFactory.getLogger(CustomReplicator.class);
 
     /**
      * Public Constructor. Creates DistCp object with specified input-parameters.
@@ -54,10 +55,9 @@ public class CustomReplicator extends DistCp {
         FilteredCopyListing copyListing = new FilteredCopyListing(job.getConfiguration(),
                 job.getCredentials());
         copyListing.buildListing(fileListingPath, inputOptions);
-        LOG.info("Number of paths considered for copy: " + copyListing.getNumberOfPaths());
-        LOG.info("Number of bytes considered for copy: " + copyListing.getBytesToCopy()
-                + " (Actual number of bytes copied depends on whether any files are "
-                + "skipped or overwritten.)");
+        LOG.info("Number of paths considered for copy: {}", copyListing.getNumberOfPaths());
+        LOG.info("Number of bytes considered for copy: {} (Actual number of bytes copied depends on whether "
+            + "any files are skipped or overwritten)", copyListing.getBytesToCopy());
         return fileListingPath;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/replication/src/main/java/org/apache/falcon/replication/FeedReplicator.java
----------------------------------------------------------------------
diff --git a/replication/src/main/java/org/apache/falcon/replication/FeedReplicator.java b/replication/src/main/java/org/apache/falcon/replication/FeedReplicator.java
index 1a7d1db..43cfed9 100644
--- a/replication/src/main/java/org/apache/falcon/replication/FeedReplicator.java
+++ b/replication/src/main/java/org/apache/falcon/replication/FeedReplicator.java
@@ -29,7 +29,8 @@ import org.apache.hadoop.tools.DistCp;
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -41,7 +42,7 @@ import java.util.regex.Pattern;
  */
 public class FeedReplicator extends Configured implements Tool {
 
-    private static final Logger LOG = Logger.getLogger(FeedReplicator.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FeedReplicator.class);
 
     public static void main(String[] args) throws Exception {
         ToolRunner.run(new Configuration(), new FeedReplicator(), args);
@@ -57,8 +58,7 @@ public class FeedReplicator extends Configured implements Tool {
         Path confPath = new Path("file:///"
                 + System.getProperty("oozie.action.conf.xml"));
 
-        LOG.info(confPath + " found conf ? "
-                + confPath.getFileSystem(conf).exists(confPath));
+        LOG.info("{} found conf ? {}", confPath, confPath.getFileSystem(conf).exists(confPath));
         conf.addResource(confPath);
 
         String falconFeedStorageType = cmd.getOptionValue("falconFeedStorageType").trim();
@@ -146,11 +146,10 @@ public class FeedReplicator extends Configured implements Tool {
         if (files != null) {
             for (FileStatus file : files) {
                 fs.create(new Path(file.getPath(), EntityUtil.SUCCEEDED_FILE_NAME)).close();
-                LOG.info("Created " + new Path(file.getPath(), EntityUtil.SUCCEEDED_FILE_NAME));
+                LOG.info("Created {}", new Path(file.getPath(), EntityUtil.SUCCEEDED_FILE_NAME));
             }
         } else {
-            LOG.info("No files present in path: "
-                    + new Path(targetPath.toString() + "/" + fixedPath).toString());
+            LOG.info("No files present in path: {}", new Path(targetPath, fixedPath));
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/replication/src/main/java/org/apache/falcon/replication/FilteredCopyListing.java
----------------------------------------------------------------------
diff --git a/replication/src/main/java/org/apache/falcon/replication/FilteredCopyListing.java b/replication/src/main/java/org/apache/falcon/replication/FilteredCopyListing.java
index 52fcffe..58c09b4 100644
--- a/replication/src/main/java/org/apache/falcon/replication/FilteredCopyListing.java
+++ b/replication/src/main/java/org/apache/falcon/replication/FilteredCopyListing.java
@@ -24,7 +24,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.tools.SimpleCopyListing;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.regex.Pattern;
@@ -35,7 +36,7 @@ import java.util.regex.Pattern;
  * depending on data availability will work correctly.
  */
 public class FilteredCopyListing extends SimpleCopyListing {
-    private static final Logger LOG = Logger.getLogger(FilteredCopyListing.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FilteredCopyListing.class);
 
     /**
      * Default pattern character: Escape any special meaning.
@@ -56,8 +57,8 @@ public class FilteredCopyListing extends SimpleCopyListing {
         super(configuration, credentials);
         try {
             regex = getRegEx(configuration.get("falcon.include.path", "").trim());
-            LOG.info("Inclusion pattern = " + configuration.get("falcon.include.path"));
-            LOG.info("Regex pattern = " + regex);
+            LOG.info("Inclusion pattern = {}", configuration.get("falcon.include.path"));
+            LOG.info("Regex pattern = {}", regex);
         } catch (IOException e) {
             throw new IllegalArgumentException("Unable to build regex for "
                     + configuration.get("falcon.include.path", ""));

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/rerun/src/main/java/org/apache/falcon/latedata/LateDataHandler.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/latedata/LateDataHandler.java b/rerun/src/main/java/org/apache/falcon/latedata/LateDataHandler.java
index f204b15..bcbdb08 100644
--- a/rerun/src/main/java/org/apache/falcon/latedata/LateDataHandler.java
+++ b/rerun/src/main/java/org/apache/falcon/latedata/LateDataHandler.java
@@ -35,7 +35,8 @@ import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.*;
 import java.net.URISyntaxException;
@@ -47,15 +48,14 @@ import java.util.Map;
  */
 public class LateDataHandler extends Configured implements Tool {
 
-    private static final Logger LOG = Logger.getLogger(LateDataHandler.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LateDataHandler.class);
 
     public static void main(String[] args) throws Exception {
         Configuration conf = new Configuration();
         Path confPath = new Path("file:///"
                 + System.getProperty("oozie.action.conf.xml"));
 
-        LOG.info(confPath + " found ? "
-                + confPath.getFileSystem(conf).exists(confPath));
+        LOG.info("{} found ? {}", confPath, confPath.getFileSystem(conf).exists(confPath));
         conf.addResource(confPath);
         ToolRunner.run(conf, new LateDataHandler(), args);
     }
@@ -99,7 +99,7 @@ public class LateDataHandler extends Configured implements Tool {
         String[] inputFeedStorageTypes = getOptionValue(command, "falconInputFeedStorageTypes").split("#");
 
         Map<String, Long> metrics = computeMetrics(inputFeeds, pathGroups, inputFeedStorageTypes);
-        LOG.info("MAP data: " + metrics);
+        LOG.info("MAP data: {}", metrics);
 
         Path file = new Path(command.getOptionValue("out"));
         persistMetrics(metrics, file);
@@ -276,12 +276,12 @@ public class LateDataHandler extends Configured implements Tool {
 
             for (Map.Entry<String, Long> entry : metrics.entrySet()) {
                 if (recordedMetrics.get(entry.getKey()) == null) {
-                    LOG.info("No matching key " + entry.getKey());
+                    LOG.info("No matching key {}", entry.getKey());
                     continue;
                 }
                 if (!recordedMetrics.get(entry.getKey()).equals(entry.getValue())) {
-                    LOG.info("Recorded size:" + recordedMetrics.get(entry.getKey())
-                            + " is different from new size" + entry.getValue());
+                    LOG.info("Recorded size: {} is different from new size {}",
+                            recordedMetrics.get(entry.getKey()), entry.getValue());
                     buffer.append(entry.getKey()).append(',');
                 }
             }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunConsumer.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunConsumer.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunConsumer.java
index ca2304e..459da84 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunConsumer.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunConsumer.java
@@ -25,7 +25,8 @@ import org.apache.falcon.rerun.policy.AbstractRerunPolicy;
 import org.apache.falcon.rerun.policy.ExpBackoffPolicy;
 import org.apache.falcon.rerun.queue.DelayedQueue;
 import org.apache.falcon.security.CurrentUser;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Base class for a rerun consumer.
@@ -36,8 +37,7 @@ import org.apache.log4j.Logger;
 public abstract class AbstractRerunConsumer<T extends RerunEvent, M extends AbstractRerunHandler<T, DelayedQueue<T>>>
         implements Runnable {
 
-    protected static final Logger LOG = Logger
-            .getLogger(AbstractRerunConsumer.class);
+    protected static final Logger LOG = LoggerFactory.getLogger(AbstractRerunConsumer.class);
 
     protected M handler;
 
@@ -57,7 +57,7 @@ public abstract class AbstractRerunConsumer<T extends RerunEvent, M extends Abst
                     message = handler.takeFromQueue();
                     attempt = 1;
                 } catch (FalconException e) {
-                    LOG.error("Error while reading message from the queue: ", e);
+                    LOG.error("Error while reading message from the queue", e);
                     GenericAlert.alertRerunConsumerFailed(
                             "Error while reading message from the queue: ", e);
                     Thread.sleep(policy.getDelay(frequency, attempt));
@@ -72,7 +72,7 @@ public abstract class AbstractRerunConsumer<T extends RerunEvent, M extends Abst
                 handleRerun(message.getClusterName(), jobStatus, message);
 
             } catch (Throwable e) {
-                LOG.error("Error in rerun consumer:", e);
+                LOG.error("Error in rerun consumer", e);
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java
index 0333918..124b444 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java
@@ -25,7 +25,8 @@ import org.apache.falcon.rerun.event.RerunEvent;
 import org.apache.falcon.rerun.queue.DelayedQueue;
 import org.apache.falcon.workflow.WorkflowEngineFactory;
 import org.apache.falcon.workflow.engine.AbstractWorkflowEngine;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Base class for handling reruns.
@@ -35,8 +36,7 @@ import org.apache.log4j.Logger;
  */
 public abstract class AbstractRerunHandler<T extends RerunEvent, M extends DelayedQueue<T>> {
 
-    protected static final Logger LOG = Logger
-            .getLogger(LateRerunHandler.class);
+    protected static final Logger LOG = LoggerFactory.getLogger(LateRerunHandler.class);
     protected M delayQueue;
     private AbstractWorkflowEngine wfEngine;
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java
index 17f4337..16e340e 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java
@@ -51,9 +51,8 @@ public class LateRerunConsumer<T extends LateRerunHandler<DelayedQueue<LaterunEv
         try {
             if (jobStatus.equals("RUNNING") || jobStatus.equals("PREP")
                     || jobStatus.equals("SUSPENDED")) {
-                LOG.debug(
-                        "Re-enqueing message in LateRerunHandler for workflow with same delay as job status is running:"
-                                + message.getWfId());
+                LOG.debug("Re-enqueuing message in LateRerunHandler for workflow with same delay as "
+                    + "job status is running: {}", message.getWfId());
                 message.setMsgInsertTime(System.currentTimeMillis());
                 handler.offerToQueue(message);
                 return;
@@ -62,26 +61,22 @@ public class LateRerunConsumer<T extends LateRerunHandler<DelayedQueue<LaterunEv
             String detectLate = detectLate(message);
 
             if (detectLate.equals("")) {
-                LOG.debug("No Late Data Detected, scheduling next late rerun for wf-id: "
-                        + message.getWfId()
-                        + " at "
-                        + SchemaHelper.formatDateUTC(new Date()));
+                LOG.debug("No Late Data Detected, scheduling next late rerun for wf-id: {} at {}",
+                        message.getWfId(), SchemaHelper.formatDateUTC(new Date()));
                 handler.handleRerun(clusterName, message.getEntityType(), message.getEntityName(),
                         message.getInstance(), Integer.toString(message.getRunId()),
                         message.getWfId(), message.getWorkflowUser(), System.currentTimeMillis());
                 return;
             }
 
-            LOG.info("Late changes detected in the following feeds: " + detectLate);
+            LOG.info("Late changes detected in the following feeds: {}", detectLate);
 
             handler.getWfEngine().reRun(message.getClusterName(), message.getWfId(), null);
-            LOG.info("Scheduled late rerun for wf-id: " + message.getWfId()
-                    + " on cluster: " + message.getClusterName());
+            LOG.info("Scheduled late rerun for wf-id: {} on cluster: {}",
+                    message.getWfId(), message.getClusterName());
         } catch (Exception e) {
-            LOG.warn("Late Re-run failed for instance "
-                            + message.getEntityName() + ":"
-                            + message.getInstance() + " after "
-                            + message.getDelayInMilliSec() + " with message:", e);
+            LOG.warn("Late Re-run failed for instance {}:{} after {}",
+                    message.getEntityName(), message.getInstance(), message.getDelayInMilliSec(), e);
             GenericAlert.alertLateRerunFailed(message.getEntityType(), message.getEntityName(),
                     message.getInstance(), message.getWfId(), message.getWorkflowUser(),
                     Integer.toString(message.getRunId()), e.getMessage());
@@ -104,7 +99,7 @@ public class LateRerunConsumer<T extends LateRerunHandler<DelayedQueue<LaterunEv
         Configuration conf = LateRerunHandler.getConfiguration(storageEndpoint);
         FileSystem fs = HadoopClientFactory.get().createFileSystem(conf);
         if (!fs.exists(lateLogPath)) {
-            LOG.warn("Late log file:" + lateLogPath + " not found:");
+            LOG.warn("Late log file: {} not found", lateLogPath);
             return "";
         }
 
@@ -128,8 +123,8 @@ public class LateRerunConsumer<T extends LateRerunHandler<DelayedQueue<LaterunEv
                 }
             }
         } else {
-            LOG.warn("Late process is not configured for entity: "
-                    + message.getEntityType() + "(" + message.getEntityName() + ")");
+            LOG.warn("Late process is not configured for entity: {} ({})",
+                    message.getEntityType(), message.getEntityName());
         }
 
         return late.detectChanges(lateLogPath, computedMetrics, conf);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java
index f1de2cb..c1a79fc 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java
@@ -59,7 +59,7 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
             Date msgInsertTime = EntityUtil.parseDateUTC(nominalTime);
             Long wait = getEventDelay(entity, nominalTime);
             if (wait == -1) {
-                LOG.info("Late rerun expired for entity: " + entityType + "(" + entityName + ")");
+                LOG.info("Late rerun expired for entity: {} ({})", entityType, entityName);
 
                 java.util.Properties properties =
                         this.getWfEngine().getWorkflowProperties(cluster, wfId);
@@ -68,29 +68,27 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
                 Path lateLogPath = this.getLateLogPath(logDir,
                         EntityUtil.fromUTCtoURIDate(nominalTime), srcClusterName);
 
-                LOG.info("Going to delete path:" + lateLogPath);
+                LOG.info("Going to delete path: {}", lateLogPath);
                 final String storageEndpoint = properties.getProperty(AbstractWorkflowEngine.NAME_NODE);
                 Configuration conf = getConfiguration(storageEndpoint);
                 FileSystem fs = HadoopClientFactory.get().createFileSystem(conf);
                 if (fs.exists(lateLogPath)) {
                     boolean deleted = fs.delete(lateLogPath, true);
                     if (deleted) {
-                        LOG.info("Successfully deleted late file path:" + lateLogPath);
+                        LOG.info("Successfully deleted late file path: {}", lateLogPath);
                     }
                 }
                 return;
             }
 
-            LOG.debug("Scheduling the late rerun for entity instance : "
-                    + entityType + "(" + entityName + ")" + ":" + nominalTime
-                    + " And WorkflowId: " + wfId);
+            LOG.debug("Scheduling the late rerun for entity instance: {} ({}): {} And WorkflowId: {}",
+                    entityType, entityName, nominalTime, wfId);
             LaterunEvent event = new LaterunEvent(cluster, wfId, msgInsertTime.getTime(),
                     wait, entityType, entityName, nominalTime, intRunId, workflowUser);
             offerToQueue(event);
         } catch (Exception e) {
-            LOG.error("Unable to schedule late rerun for entity instance : "
-                    + entityType + "(" + entityName + ")" + ":" + nominalTime
-                    + " And WorkflowId: " + wfId, e);
+            LOG.error("Unable to schedule late rerun for entity instance: {} ({}): {} And WorkflowId: {}",
+                    entityType, entityName, nominalTime, wfId, e);
             GenericAlert.alertLateRerunFailed(entityType, entityName,
                     nominalTime, wfId, workflowUser, runId, e.getMessage());
         }
@@ -102,8 +100,7 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
         Date instanceDate = EntityUtil.parseDateUTC(nominalTime);
         LateProcess lateProcess = EntityUtil.getLateProcess(entity);
         if (lateProcess == null) {
-            LOG.warn("Late run not applicable for entity:"
-                    + entity.getEntityType() + "(" + entity.getName() + ")");
+            LOG.warn("Late run not applicable for entity: {} ({})", entity.getEntityType(), entity.getName());
             return -1;
         }
         PolicyType latePolicy = lateProcess.getPolicy();
@@ -112,9 +109,8 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
         Long wait;
 
         if (now.after(cutOffTime)) {
-            LOG.warn("Feed Cut Off time: "
-                    + SchemaHelper.formatDateUTC(cutOffTime)
-                    + " has expired, Late Rerun can not be scheduled");
+            LOG.warn("Feed Cut Off time: {} has expired, Late Rerun can not be scheduled",
+                    SchemaHelper.formatDateUTC(cutOffTime));
             return -1;
         } else {
             AbstractRerunPolicy rerunPolicy = RerunPolicyFactory
@@ -137,8 +133,7 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
         Date feedCutOff = new Date(0);
         if (entity.getEntityType() == EntityType.FEED) {
             if (((Feed) entity).getLateArrival() == null) {
-                LOG.debug("Feed's " + entity.getName()
-                        + " late arrival cut-off is not configured, returning");
+                LOG.debug("Feed's {} late arrival cut-off is not configured, returning", entity.getName());
                 return feedCutOff;
             }
             String lateCutOff = ((Feed) entity).getLateArrival().getCutOff()
@@ -164,8 +159,7 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
                     throw new IllegalStateException("No such feed: " + lp.getInput());
                 }
                 if (feed.getLateArrival() == null) {
-                    LOG.debug("Feed's " + feed.getName()
-                            + " late arrival cut-off is not configured, ignoring this feed");
+                    LOG.debug("Feed's {} late arrival cut-off is not configured, ignoring this feed", feed.getName());
                     continue;
                 }
                 String lateCutOff = feed.getLateArrival().getCutOff()
@@ -181,9 +175,7 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
             }
             return feedCutOff;
         } else {
-            throw new FalconException(
-                    "Invalid entity while getting cut-off time:"
-                            + entity.getName());
+            throw new FalconException("Invalid entity while getting cut-off time:" + entity.getName());
         }
     }
 
@@ -194,7 +186,7 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
         daemon.setName("LaterunHandler");
         daemon.setDaemon(true);
         daemon.start();
-        LOG.info("Laterun Handler  thread started");
+        LOG.info("Laterun Handler thread started");
     }
 
     public Path getLateLogPath(String logDir, String nominalTime,

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java
index bb0b34a..685afe2 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java
@@ -43,41 +43,28 @@ public class RetryConsumer<T extends RetryHandler<DelayedQueue<RetryEvent>>>
         try {
             if (!jobStatus.equals("KILLED")) {
                 LOG.debug("Re-enqueing message in RetryHandler for workflow with same delay as job status is running:"
-                        + message.getWfId());
+                    + " {}", message.getWfId());
                 message.setMsgInsertTime(System.currentTimeMillis());
                 handler.offerToQueue(message);
                 return;
             }
-            LOG.info("Retrying attempt:"
-                    + (message.getRunId() + 1)
-                    + " out of configured: "
-                    + message.getAttempts()
-                    + " attempt for instance::"
-                    + message.getEntityName()
-                    + ":"
-                    + message.getInstance()
-                    + " And WorkflowId: "
-                    + message.getWfId()
-                    + " At time: "
-                    + SchemaHelper.formatDateUTC(new Date(System
-                    .currentTimeMillis())));
+            LOG.info("Retrying attempt: {} out of configured: {} attempt for instance: {}:{} And WorkflowId: {}"
+                    + " At time: {}",
+                    (message.getRunId() + 1), message.getAttempts(), message.getEntityName(), message.getInstance(),
+                    message.getWfId(), SchemaHelper.formatDateUTC(new Date(System.currentTimeMillis())));
             handler.getWfEngine().reRun(message.getClusterName(),
                     message.getWfId(), null);
         } catch (Exception e) {
             int maxFailRetryCount = Integer.parseInt(StartupProperties.get()
                     .getProperty("max.retry.failure.count", "1"));
             if (message.getFailRetryCount() < maxFailRetryCount) {
-                LOG.warn(
-                        "Retrying again for process instance "
-                                + message.getEntityName() + ":"
-                                + message.getInstance() + " after "
-                                + message.getDelayInMilliSec()
-                                + " seconds as Retry failed with message:", e);
+                LOG.warn("Retrying again for process instance {}:{} after {} seconds as Retry failed",
+                        message.getEntityName(), message.getInstance(), message.getDelayInMilliSec(), e);
                 message.setFailRetryCount(message.getFailRetryCount() + 1);
                 try {
                     handler.offerToQueue(message);
                 } catch (Exception ex) {
-                    LOG.error("Unable to re-offer to queue:", ex);
+                    LOG.error("Unable to re-offer to queue", ex);
                     GenericAlert.alertRetryFailed(message.getEntityType(),
                             message.getEntityName(), message.getInstance(),
                             message.getWfId(), message.getWorkflowUser(),
@@ -85,10 +72,8 @@ public class RetryConsumer<T extends RetryHandler<DelayedQueue<RetryEvent>>>
                             ex.getMessage());
                 }
             } else {
-                LOG.warn(
-                        "Failure retry attempts exhausted for instance: "
-                                + message.getEntityName() + ":"
-                                + message.getInstance(), e);
+                LOG.warn("Failure retry attempts exhausted for instance: {}:{}",
+                        message.getEntityName(), message.getInstance(), e);
                 GenericAlert.alertRetryFailed(message.getEntityType(),
                         message.getEntityName(), message.getInstance(),
                         message.getWfId(), message.getWorkflowUser(),

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java
index ef49c3a..8d8ea03 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java
@@ -46,8 +46,8 @@ public class RetryHandler<M extends DelayedQueue<RetryEvent>> extends
             Retry retry = getRetry(entity);
 
             if (retry == null) {
-                LOG.warn("Retry not configured for entity:" + entityType + "("
-                        + entity.getName() + "), ignoring failed retries");
+                LOG.warn("Retry not configured for entity: {} ({}), ignoring failed retries",
+                        entityType, entity.getName());
                 return;
             }
 
@@ -64,10 +64,8 @@ public class RetryHandler<M extends DelayedQueue<RetryEvent>> extends
                         nominalTime, intRunId, attempts, 0, workflowUser);
                 offerToQueue(event);
             } else {
-                LOG.warn("All retry attempt failed out of configured: "
-                        + attempts + " attempt for entity instance::"
-                        + entityName + ":" + nominalTime + " And WorkflowId: "
-                        + wfId);
+                LOG.warn("All retry attempt failed out of configured: {} attempt for entity instance: {}:{} "
+                    + "And WorkflowId: {}", attempts, entityName, nominalTime, wfId);
 
                 GenericAlert.alertRetryFailed(entityType, entityName,
                         nominalTime, wfId, workflowUser, runId,
@@ -75,7 +73,7 @@ public class RetryHandler<M extends DelayedQueue<RetryEvent>> extends
                                 + attempts + " attempt for entity instance::");
             }
         } catch (FalconException e) {
-            LOG.error("Error during retry of entity instance " + entityName + ":" + nominalTime, e);
+            LOG.error("Error during retry of entity instance {}:{}", entityName, nominalTime, e);
             GenericAlert.alertRetryFailed(entityType, entityName, nominalTime,
                     wfId, workflowUser, runId, e.getMessage());
         }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/rerun/src/main/java/org/apache/falcon/rerun/queue/ActiveMQueue.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/queue/ActiveMQueue.java b/rerun/src/main/java/org/apache/falcon/rerun/queue/ActiveMQueue.java
index 3fa5282..d9567b6 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/queue/ActiveMQueue.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/queue/ActiveMQueue.java
@@ -57,12 +57,12 @@ public class ActiveMQueue<T extends RerunEvent> extends DelayedQueue<T> {
                     event.getDelay(TimeUnit.MILLISECONDS));
             msg.setStringProperty("TYPE", event.getType().name());
             producer.send(msg);
-            LOG.debug("Enqueued Message:" + event.toString() + "with delay "
-                    + event.getDelay(TimeUnit.MILLISECONDS) + " milli sec");
+            LOG.debug("Enqueued Message: {} with delay {} milli sec",
+                    event.toString(), event.getDelay(TimeUnit.MILLISECONDS));
             return true;
         } catch (Exception e) {
-            LOG.error("Unable to offer event:" + event + " to activeMqueue", e);
-            throw new FalconException("Unable to offer event:" + event + " to activeMqueue", e);
+            LOG.error("Unable to offer event: {} to ActiveMQ", event, e);
+            throw new FalconException("Unable to offer event:" + event + " to ActiveMQ", e);
         }
     }
 
@@ -82,11 +82,11 @@ public class ActiveMQueue<T extends RerunEvent> extends DelayedQueue<T> {
             T event = new RerunEventFactory<T>().getRerunEvent(
                     textMessage.getStringProperty("TYPE"),
                     textMessage.getText());
-            LOG.debug("Dequeued Message:" + event.toString());
+            LOG.debug("Dequeued Message: {}", event.toString());
             return event;
         } catch (Exception e) {
-            LOG.error("Error getting the messge from ActiveMqueue: ", e);
-            throw new FalconException("Error getting the messge from ActiveMqueue: ", e);
+            LOG.error("Error getting the message from ActiveMQ", e);
+            throw new FalconException("Error getting the message from ActiveMQ: ", e);
         }
     }
 
@@ -103,14 +103,10 @@ public class ActiveMQueue<T extends RerunEvent> extends DelayedQueue<T> {
             destination = session.createQueue(destinationName);
             producer = session.createProducer(destination);
             consumer = session.createConsumer(destination);
-            LOG.info("Initialized Queue on activeMQ: " + destinationName);
+            LOG.info("Initialized Queue on ActiveMQ: {}", destinationName);
         } catch (Exception e) {
-            LOG.error(
-                    "Error starting ActiveMQueue connection for dealyed queue",
-                    e);
-            throw new RuntimeException(
-                    "Error starting ActiveMQueue connection for delayed queue",
-                    e);
+            LOG.error("Error starting ActiveMQ connection for delayed queue", e);
+            throw new RuntimeException("Error starting ActiveMQ connection for delayed queue", e);
         }
     }
 
@@ -120,7 +116,7 @@ public class ActiveMQueue<T extends RerunEvent> extends DelayedQueue<T> {
                 userName, password, url);
         connection = (ActiveMQConnection) connectionFactory.createConnection();
         connection.start();
-        LOG.info("Connected successfully to " + url);
+        LOG.info("Connected successfully to {}", url);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/rerun/src/main/java/org/apache/falcon/rerun/queue/DelayedQueue.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/queue/DelayedQueue.java b/rerun/src/main/java/org/apache/falcon/rerun/queue/DelayedQueue.java
index 393a8e5..01231d4 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/queue/DelayedQueue.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/queue/DelayedQueue.java
@@ -19,7 +19,8 @@ package org.apache.falcon.rerun.queue;
 
 import org.apache.falcon.FalconException;
 import org.apache.falcon.rerun.event.RerunEvent;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.List;
 
@@ -28,7 +29,7 @@ import java.util.List;
  * @param <T>
  */
 public abstract class DelayedQueue<T extends RerunEvent> {
-    public static final Logger LOG = Logger.getLogger(DelayedQueue.class);
+    public static final Logger LOG = LoggerFactory.getLogger(DelayedQueue.class);
 
     public abstract boolean offer(T event) throws FalconException;
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/rerun/src/main/java/org/apache/falcon/rerun/queue/InMemoryQueue.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/queue/InMemoryQueue.java b/rerun/src/main/java/org/apache/falcon/rerun/queue/InMemoryQueue.java
index bc7c999..10ca1ed 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/queue/InMemoryQueue.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/queue/InMemoryQueue.java
@@ -22,7 +22,8 @@ import org.apache.commons.io.IOUtils;
 import org.apache.falcon.aspect.GenericAlert;
 import org.apache.falcon.rerun.event.RerunEvent;
 import org.apache.falcon.rerun.event.RerunEventFactory;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.*;
 import java.util.ArrayList;
@@ -34,7 +35,7 @@ import java.util.concurrent.DelayQueue;
  * @param <T>
  */
 public class InMemoryQueue<T extends RerunEvent> extends DelayedQueue<T> {
-    public static final Logger LOG = Logger.getLogger(DelayedQueue.class);
+    public static final Logger LOG = LoggerFactory.getLogger(DelayedQueue.class);
 
     protected DelayQueue<T> delayQueue = new DelayQueue<T>();
     private final File serializeFilePath;
@@ -47,7 +48,7 @@ public class InMemoryQueue<T extends RerunEvent> extends DelayedQueue<T> {
     public boolean offer(T event) {
         boolean flag = delayQueue.offer(event);
         beforeRetry(event);
-        LOG.debug("Enqueued Message:" + event.toString());
+        LOG.debug("Enqueued Message: {}", event.toString());
         return flag;
     }
 
@@ -56,7 +57,7 @@ public class InMemoryQueue<T extends RerunEvent> extends DelayedQueue<T> {
         T event;
         try {
             event = delayQueue.take();
-            LOG.debug("Dequeued Message:" + event.toString());
+            LOG.debug("Dequeued Message: {}", event.toString());
             afterRetry(event);
         } catch (InterruptedException e) {
             throw new FalconException(e);
@@ -90,9 +91,8 @@ public class InMemoryQueue<T extends RerunEvent> extends DelayedQueue<T> {
             out.newLine();
             out.close();
         } catch (IOException e) {
-            LOG.warn("Unable to write entry for process-instance: "
-                            + event.getEntityName() + ":"
-                            + event.getInstance(), e);
+            LOG.warn("Unable to write entry for process-instance: {}:{}",
+                            event.getEntityName(), event.getInstance(), e);
         } finally {
             IOUtils.closeQuietly(out);
         }
@@ -107,14 +107,14 @@ public class InMemoryQueue<T extends RerunEvent> extends DelayedQueue<T> {
     private void afterRetry(T event) {
         File retryFile = getRetryFile(serializeFilePath, event);
         if (!retryFile.exists()) {
-            LOG.warn("Rerun file deleted or renamed for process-instance: "
-                    + event.getEntityName() + ":" + event.getInstance());
+            LOG.warn("Rerun file deleted or renamed for process-instance: {}:{}",
+                    event.getEntityName(), event.getInstance());
             GenericAlert.alertRetryFailed(event.getEntityType(), event.getEntityName(), event.getInstance(),
                     event.getWfId(), event.getWorkflowUser(), Integer.toString(event.getRunId()),
                     "Rerun file deleted or renamed for process-instance:");
         } else {
             if (!retryFile.delete()) {
-                LOG.warn("Unable to remove rerun file " + event.getWfId());
+                LOG.warn("Unable to remove rerun file {}", event.getWfId());
                 retryFile.deleteOnExit();
             }
         }
@@ -135,7 +135,7 @@ public class InMemoryQueue<T extends RerunEvent> extends DelayedQueue<T> {
                         rerunEvents.add(event);
                     }
                 } catch (Exception e) {
-                    LOG.warn("Not able to read rerun entry " + rerunFile.getAbsolutePath(), e);
+                    LOG.warn("Not able to read rerun entry {}", rerunFile.getAbsolutePath(), e);
                 } finally {
                     IOUtils.closeQuietly(reader);
                 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/rerun/src/main/java/org/apache/falcon/rerun/service/LateRunService.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/service/LateRunService.java b/rerun/src/main/java/org/apache/falcon/rerun/service/LateRunService.java
index cdbadaa..884a8e2 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/service/LateRunService.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/service/LateRunService.java
@@ -25,7 +25,8 @@ import org.apache.falcon.rerun.handler.RerunHandlerFactory;
 import org.apache.falcon.rerun.queue.ActiveMQueue;
 import org.apache.falcon.service.FalconService;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 
@@ -34,7 +35,7 @@ import java.io.File;
  */
 public class LateRunService implements FalconService {
 
-    private static final Logger LOG = Logger.getLogger(LateRunService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LateRunService.class);
 
     @Override
     public String getName() {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/rerun/src/main/java/org/apache/falcon/rerun/service/RetryService.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/service/RetryService.java b/rerun/src/main/java/org/apache/falcon/rerun/service/RetryService.java
index b989acd..bc64ef5 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/service/RetryService.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/service/RetryService.java
@@ -26,7 +26,8 @@ import org.apache.falcon.rerun.queue.DelayedQueue;
 import org.apache.falcon.rerun.queue.InMemoryQueue;
 import org.apache.falcon.service.FalconService;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 
@@ -35,7 +36,7 @@ import java.io.File;
  */
 public class RetryService implements FalconService {
 
-    private static final Logger LOG = Logger.getLogger(RetryService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(RetryService.class);
 
     @Override
     public String getName() {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/retention/src/main/java/org/apache/falcon/retention/FeedEvictor.java
----------------------------------------------------------------------
diff --git a/retention/src/main/java/org/apache/falcon/retention/FeedEvictor.java b/retention/src/main/java/org/apache/falcon/retention/FeedEvictor.java
index 138a769..4de7938 100644
--- a/retention/src/main/java/org/apache/falcon/retention/FeedEvictor.java
+++ b/retention/src/main/java/org/apache/falcon/retention/FeedEvictor.java
@@ -18,31 +18,6 @@
 
 package org.apache.falcon.retention;
 
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.PrintStream;
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.TimeZone;
-import java.util.TreeMap;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.servlet.jsp.el.ELException;
-import javax.servlet.jsp.el.ExpressionEvaluator;
-
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.Option;
@@ -68,7 +43,32 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.jsp.el.ELException;
+import javax.servlet.jsp.el.ExpressionEvaluator;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 /**
  * Feed Evictor is called only if the retention policy that applies
@@ -76,7 +76,7 @@ import org.apache.log4j.Logger;
  */
 public class FeedEvictor extends Configured implements Tool {
 
-    private static final Logger LOG = Logger.getLogger(FeedEvictor.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FeedEvictor.class);
 
     private static final ExpressionEvaluator EVALUATOR = new ExpressionEvaluatorImpl();
     private static final ExpressionHelper RESOLVER = ExpressionHelper.get();
@@ -98,7 +98,7 @@ public class FeedEvictor extends Configured implements Tool {
         Configuration conf = new Configuration();
         Path confPath = new Path("file:///" + System.getProperty("oozie.action.conf.xml"));
 
-        LOG.info(confPath + " found ? " + confPath.getFileSystem(conf).exists(confPath));
+        LOG.info("{} found ? {}", confPath, confPath.getFileSystem(conf).exists(confPath));
         conf.addResource(confPath);
         int ret = ToolRunner.run(conf, new FeedEvictor(), args);
         if (ret != 0) {
@@ -123,9 +123,8 @@ public class FeedEvictor extends Configured implements Tool {
         String logFile = cmd.getOptionValue("logFile");
         String feedStorageType = cmd.getOptionValue("falconFeedStorageType");
 
-        LOG.info("Applying retention on " + feedPattern + " type: " + retentionType
-                + ", Limit: " + retentionLimit + ", timezone: " + timeZone
-                + ", frequency: " + frequency + ", storage" + feedStorageType);
+        LOG.info("Applying retention on {} type: {}, Limit: {}, timezone: {}, frequency: {}, storage: {}",
+                feedPattern, retentionType, retentionLimit, timeZone, frequency, feedStorageType);
 
         Storage storage = FeedHelper.createStorage(feedStorageType, feedPattern);
         evict(storage, retentionLimit, timeZone);
@@ -166,7 +165,7 @@ public class FeedEvictor extends Configured implements Tool {
         Path normalizedPath = new Path(feedPath);
         FileSystem fs = normalizedPath.getFileSystem(getConf());
         feedPath = normalizedPath.toUri().getPath();
-        LOG.info("Normalized path : " + feedPath);
+        LOG.info("Normalized path: {}", feedPath);
 
         Pair<Date, Date> range = getDateRange(retentionLimit);
         String dateMask = getDateFormatInPath(feedPath);
@@ -199,14 +198,12 @@ public class FeedEvictor extends Configured implements Tool {
     }
 
     private void logInstancePaths(Path path) throws IOException {
-        LOG.info("Writing deleted instances to path " + path);
+        LOG.info("Writing deleted instances to path {}", path);
         FileSystem logfs = path.getFileSystem(getConf());
         OutputStream out = logfs.create(path);
         out.write(instancePaths.toString().getBytes());
         out.close();
-        if (LOG.isDebugEnabled()) {
-            debug(logfs, path);
-        }
+        debug(logfs, path);
     }
 
     private Pair<Date, Date> getDateRange(String period) throws ELException {
@@ -229,8 +226,8 @@ public class FeedEvictor extends Configured implements Tool {
         for (FileStatus file : files) {
             Date date = getDate(new Path(file.getPath().toUri().getPath()),
                     inPath, dateMask, timeZone);
-            LOG.debug("Considering " + file.getPath().toUri().getPath());
-            LOG.debug("Date : " + date);
+            LOG.debug("Considering {}", file.getPath().toUri().getPath());
+            LOG.debug("Date: {}", date);
             if (date != null && !isDateInRange(date, start)) {
                 toBeDeleted.add(new Path(file.getPath().toUri().getPath()));
             }
@@ -256,7 +253,7 @@ public class FeedEvictor extends Configured implements Tool {
             feedBasePath = feedBasePath.replaceAll(Pattern.quote(var), "*");
             matcher = FeedDataPath.PATTERN.matcher(feedBasePath);
         }
-        LOG.info("Searching for " + feedBasePath);
+        LOG.info("Searching for {}", feedBasePath);
         return fs.globStatus(new Path(feedBasePath));
     }
 
@@ -278,17 +275,17 @@ public class FeedEvictor extends Configured implements Tool {
 
         String errArg = file + "(" + inMask + ")";
         if (map.isEmpty()) {
-            LOG.warn("No date present in " + errArg);
+            LOG.warn("No date present in {}", errArg);
             return null;
         }
 
-        String date = "";
+        StringBuilder date = new StringBuilder();
         int ordinal = 0;
         for (VARS var : map.keySet()) {
             if (ordinal++ == var.ordinal()) {
-                date += map.get(var);
+                date.append(map.get(var));
             } else {
-                LOG.warn("Prior element to " + var + " is missing " + errArg);
+                LOG.warn("Prior element to {} is missing {}", var, errArg);
                 return null;
             }
         }
@@ -297,9 +294,9 @@ public class FeedEvictor extends Configured implements Tool {
             DateFormat dateFormat = new SimpleDateFormat(FORMAT.
                     substring(0, date.length()));
             dateFormat.setTimeZone(TimeZone.getTimeZone(timeZone));
-            return dateFormat.parse(date);
+            return dateFormat.parse(date.toString());
         } catch (ParseException e) {
-            LOG.warn("Unable to parse date : " + date + ", " + errArg);
+            LOG.warn("Unable to parse date: {}, {}", date, errArg);
             return null;
         }
     }
@@ -326,7 +323,7 @@ public class FeedEvictor extends Configured implements Tool {
 
     private void deleteInstance(FileSystem fs, Path path, Path feedBasePath) throws IOException {
         if (fs.delete(path, true)) {
-            LOG.info("Deleted instance :" + path);
+            LOG.info("Deleted instance: {}", path);
         }else{
             throw new IOException("Unable to delete instance: " + path);
         }
@@ -337,8 +334,8 @@ public class FeedEvictor extends Configured implements Tool {
         ByteArrayOutputStream writer = new ByteArrayOutputStream();
         InputStream instance = fs.open(outPath);
         IOUtils.copyBytes(instance, writer, 4096, true);
-        LOG.debug("Instance Paths copied to " + outPath);
-        LOG.debug("Written " + writer);
+        LOG.debug("Instance Paths copied to {}", outPath);
+        LOG.debug("Written {}", writer);
     }
 
     private CommandLine getCommand(String[] args) throws org.apache.commons.cli.ParseException {
@@ -382,8 +379,8 @@ public class FeedEvictor extends Configured implements Tool {
     private void evictTable(CatalogStorage storage, String retentionLimit, String timeZone)
         throws Exception {
 
-        LOG.info("Applying retention on " + storage.getTable()
-                + ", Limit: " + retentionLimit + ", timezone: " + timeZone);
+        LOG.info("Applying retention on {}, Limit: {}, timezone: {}",
+                storage.getTable(), retentionLimit, timeZone);
 
         // get sorted date partition keys and values
         List<String> datedPartKeys = new ArrayList<String>();
@@ -562,13 +559,13 @@ public class FeedEvictor extends Configured implements Tool {
 
     private void deleteParentIfEmpty(FileSystem fs, Path parent, Path feedBasePath) throws IOException {
         if (feedBasePath.equals(parent)) {
-            LOG.info("Not deleting feed base path:" + parent);
+            LOG.info("Not deleting feed base path: {}", parent);
         } else {
             FileStatus[] files = fs.listStatus(parent);
             if (files != null && files.length == 0) {
-                LOG.info("Parent path: " + parent + " is empty, deleting path");
+                LOG.info("Parent path: {} is empty, deleting path", parent);
                 if (fs.delete(parent, true)) {
-                    LOG.info("Deleted empty dir: " + parent);
+                    LOG.info("Deleted empty dir: {}", parent);
                 } else {
                     throw new IOException("Unable to delete parent path:" + parent);
                 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
----------------------------------------------------------------------
diff --git a/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java b/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
index af29f93..6e2fad5 100644
--- a/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
+++ b/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
@@ -28,7 +28,8 @@ import org.apache.falcon.hadoop.JailedFileSystem;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
@@ -38,7 +39,7 @@ import java.security.PrivilegedExceptionAction;
  */
 public class EmbeddedCluster {
 
-    private static final Logger LOG = Logger.getLogger(EmbeddedCluster.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EmbeddedCluster.class);
 
     protected EmbeddedCluster() {
     }
@@ -82,7 +83,7 @@ public class EmbeddedCluster {
         cluster.conf.set("fs.default.name", "jail://" + (global ? "global" : name) + ":00");
 
         String hdfsUrl = cluster.conf.get("fs.default.name");
-        LOG.info("Cluster Namenode = " + hdfsUrl);
+        LOG.info("Cluster Namenode = {}", hdfsUrl);
         cluster.buildClusterObject(name);
         return cluster;
     }
@@ -95,7 +96,7 @@ public class EmbeddedCluster {
         clusterEntity = new Cluster();
         clusterEntity.setName(name);
         clusterEntity.setColo("local");
-        clusterEntity.setDescription("Embeded cluster: " + name);
+        clusterEntity.setDescription("Embedded cluster: " + name);
 
         Interfaces interfaces = new Interfaces();
         interfaces.getInterfaces().add(newInterface(Interfacetype.WORKFLOW,

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/test-util/src/main/java/org/apache/falcon/cluster/util/StandAloneCluster.java
----------------------------------------------------------------------
diff --git a/test-util/src/main/java/org/apache/falcon/cluster/util/StandAloneCluster.java b/test-util/src/main/java/org/apache/falcon/cluster/util/StandAloneCluster.java
index ab1d038..e4b5592 100644
--- a/test-util/src/main/java/org/apache/falcon/cluster/util/StandAloneCluster.java
+++ b/test-util/src/main/java/org/apache/falcon/cluster/util/StandAloneCluster.java
@@ -22,7 +22,8 @@ import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.cluster.Interface;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 
@@ -30,7 +31,7 @@ import java.io.File;
  * A standalone cluster.
  */
 public final class StandAloneCluster extends EmbeddedCluster {
-    private static final Logger LOG = Logger.getLogger(StandAloneCluster.class);
+    private static final Logger LOG = LoggerFactory.getLogger(StandAloneCluster.class);
 
     private StandAloneCluster() {
     }
@@ -47,7 +48,7 @@ public final class StandAloneCluster extends EmbeddedCluster {
             }
         }
 
-        LOG.info("Cluster Namenode = " + cluster.getConf().get("fs.default.name"));
+        LOG.info("Cluster Namenode = {}", cluster.getConf().get("fs.default.name"));
         return cluster;
     }
 


[3/4] git commit: FALCON-133 Upgrade to slf4j 1.7.5 and use SLF4J logger. Contributed by Jean-Baptiste Onofré

Posted by sh...@apache.org.
FALCON-133 Upgrade to slf4j 1.7.5 and use SLF4J logger. Contributed by Jean-Baptiste Onofré


Project: http://git-wip-us.apache.org/repos/asf/incubator-falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-falcon/commit/b036d740
Tree: http://git-wip-us.apache.org/repos/asf/incubator-falcon/tree/b036d740
Diff: http://git-wip-us.apache.org/repos/asf/incubator-falcon/diff/b036d740

Branch: refs/heads/master
Commit: b036d740a4dc13bb5e8102dde71519229f202506
Parents: d2ac5b6
Author: Shwetha GS <sh...@inmobi.com>
Authored: Wed May 21 12:44:35 2014 +0530
Committer: Shwetha GS <sh...@inmobi.com>
Committed: Wed May 21 12:48:12 2014 +0530

----------------------------------------------------------------------
 CHANGES.txt                                     |   1 +
 addons/designer/pom.xml                         |  10 +-
 client/pom.xml                                  |   4 +
 .../falcon/catalog/HiveCatalogService.java      |  21 ++--
 .../falcon/cleanup/AbstractCleanupHandler.java  |  19 ++--
 .../falcon/cleanup/FeedCleanupHandler.java      |   8 +-
 .../falcon/cleanup/ProcessCleanupHandler.java   |   8 +-
 .../entity/parser/ClusterEntityParser.java      |  19 ++--
 .../falcon/entity/parser/EntityParser.java      |   7 +-
 .../falcon/entity/parser/FeedEntityParser.java  |   9 +-
 .../falcon/entity/store/ConfigurationStore.java |  17 +--
 .../apache/falcon/entity/v0/EntityGraph.java    |   9 +-
 .../EntityRelationshipGraphBuilder.java         |  27 ++---
 .../org/apache/falcon/metadata/GraphUtils.java  |   9 +-
 .../InstanceRelationshipGraphBuilder.java       |  17 +--
 .../apache/falcon/metadata/LineageRecorder.java |  11 +-
 .../falcon/metadata/MetadataMappingService.java |  23 +++--
 .../metadata/RelationshipGraphBuilder.java      |  23 ++---
 .../AuthenticationInitializationService.java    |   8 +-
 .../org/apache/falcon/security/CurrentUser.java |   7 +-
 .../falcon/service/LogCleanupService.java       |   7 +-
 .../falcon/service/ServiceInitializer.java      |  17 +--
 .../org/apache/falcon/update/UpdateHelper.java  |  15 +--
 .../falcon/util/ApplicationProperties.java      |  15 +--
 .../org/apache/falcon/util/DeploymentUtil.java  |   9 +-
 .../apache/falcon/util/RuntimeProperties.java   |   5 +-
 .../entity/store/ConfigurationStoreTest.java    |   7 +-
 .../workflow/OozieFeedWorkflowBuilder.java      |  15 +--
 .../falcon/listener/HadoopStartupListener.java  |   7 +-
 .../falcon/messaging/EntityInstanceMessage.java |  17 +--
 .../falcon/messaging/MessageProducer.java       |  16 +--
 metrics/pom.xml                                 |   4 +-
 .../falcon/aspect/AbstractFalconAspect.java     |   8 +-
 .../org/apache/falcon/plugin/LoggingPlugin.java |   7 +-
 oozie-el-extensions/pom.xml                     |   9 +-
 .../falcon/logging/DefaultTaskLogRetriever.java |   9 +-
 .../org/apache/falcon/logging/LogMover.java     |  19 ++--
 .../org/apache/falcon/logging/LogProvider.java  |   7 +-
 .../service/SharedLibraryHostingService.java    |  11 +-
 .../falcon/workflow/FalconPostProcessing.java   |  13 +--
 .../falcon/workflow/OozieWorkflowBuilder.java   |  15 +--
 .../workflow/engine/OozieClientFactory.java     |   7 +-
 .../engine/OozieHouseKeepingService.java        |   7 +-
 .../workflow/engine/OozieWorkflowEngine.java    | 102 +++++++++---------
 .../apache/oozie/client/ProxyOozieClient.java   |  13 +--
 .../falcon/logging/v1/TaskLogRetrieverV1.java   |  12 +--
 pom.xml                                         |  10 +-
 .../org/apache/falcon/FalconWebException.java   |  13 +--
 prism/src/main/java/org/apache/falcon/Main.java |   7 +-
 .../falcon/listener/ContextStartupListener.java |   9 +-
 .../plugin/ChainableMonitoringPlugin.java       |  11 +-
 .../falcon/resource/AbstractEntityManager.java  |  29 +++---
 .../resource/AbstractInstanceManager.java       |   5 +-
 .../AbstractSchedulableEntityManager.java       |   5 +-
 .../falcon/resource/channel/HTTPChannel.java    |  13 +--
 .../falcon/resource/channel/IPCChannel.java     |   7 +-
 .../metadata/LineageMetadataResource.java       |  17 +--
 .../apache/falcon/security/BasicAuthFilter.java |   7 +-
 .../falcon/service/FalconTopicSubscriber.java   |  25 ++---
 .../apache/falcon/aspect/LoggingAspectTest.java |   2 +-
 .../workflow/OozieProcessWorkflowBuilder.java   |   7 +-
 .../falcon/replication/CustomReplicator.java    |  12 +--
 .../falcon/replication/FeedReplicator.java      |  13 ++-
 .../falcon/replication/FilteredCopyListing.java |   9 +-
 .../apache/falcon/latedata/LateDataHandler.java |  16 +--
 .../rerun/handler/AbstractRerunConsumer.java    |  10 +-
 .../rerun/handler/AbstractRerunHandler.java     |   6 +-
 .../falcon/rerun/handler/LateRerunConsumer.java |  29 +++---
 .../falcon/rerun/handler/LateRerunHandler.java  |  36 +++----
 .../falcon/rerun/handler/RetryConsumer.java     |  35 ++-----
 .../falcon/rerun/handler/RetryHandler.java      |  12 +--
 .../apache/falcon/rerun/queue/ActiveMQueue.java |  26 ++---
 .../apache/falcon/rerun/queue/DelayedQueue.java |   5 +-
 .../falcon/rerun/queue/InMemoryQueue.java       |  22 ++--
 .../falcon/rerun/service/LateRunService.java    |   5 +-
 .../falcon/rerun/service/RetryService.java      |   5 +-
 .../apache/falcon/retention/FeedEvictor.java    | 103 +++++++++----------
 .../falcon/cluster/util/EmbeddedCluster.java    |   9 +-
 .../falcon/cluster/util/StandAloneCluster.java  |   7 +-
 79 files changed, 577 insertions(+), 580 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index 7496edd..df4b983 100755
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -7,6 +7,7 @@ Trunk (Unreleased)
   NEW FEATURES
 
   IMPROVEMENTS
+   FALCON-133 Upgrade to slf4j 1.7.5 and use SLF4J logger. (Jean-Baptiste Onofré via Shwetha GS)
 
   OPTIMIZATIONS
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/addons/designer/pom.xml
----------------------------------------------------------------------
diff --git a/addons/designer/pom.xml b/addons/designer/pom.xml
index 82f4d99..3e1a98a 100644
--- a/addons/designer/pom.xml
+++ b/addons/designer/pom.xml
@@ -96,7 +96,7 @@
 
         <include.prism>true</include.prism>
 
-        <slf4j.version>1.6.1</slf4j.version>
+        <slf4j.version>1.7.5</slf4j.version>
         <hive.version>0.11.0</hive.version>
         <hcatalog.version>0.11.0</hcatalog.version>
         <jetty.version>6.1.26</jetty.version>
@@ -178,12 +178,6 @@
             </dependency>
 
             <dependency>
-                <groupId>org.slf4j</groupId>
-                <artifactId>slf4j-simple</artifactId>
-                <version>${slf4j.version}</version>
-            </dependency>
-
-            <dependency>
                 <groupId>commons-lang</groupId>
                 <artifactId>commons-lang</artifactId>
                 <version>2.6</version>
@@ -192,7 +186,7 @@
             <dependency>
                 <groupId>log4j</groupId>
                 <artifactId>log4j</artifactId>
-                <version>1.2.15</version>
+                <version>1.2.17</version>
                 <scope>compile</scope>
                 <exclusions>
                     <exclusion>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/client/pom.xml
----------------------------------------------------------------------
diff --git a/client/pom.xml b/client/pom.xml
index 28e24b7..70e0db6 100644
--- a/client/pom.xml
+++ b/client/pom.xml
@@ -88,6 +88,10 @@
         </dependency>
 
         <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+        </dependency>
+        <dependency>
             <groupId>log4j</groupId>
             <artifactId>log4j</artifactId>
         </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java b/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java
index 30736f3..b5be4e1 100644
--- a/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java
+++ b/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java
@@ -31,7 +31,8 @@ import org.apache.hcatalog.api.HCatTable;
 import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
 import org.apache.hcatalog.common.HCatException;
 import org.apache.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
@@ -46,7 +47,7 @@ import java.util.concurrent.ConcurrentHashMap;
  */
 public class HiveCatalogService extends AbstractCatalogService {
 
-    private static final Logger LOG = Logger.getLogger(HiveCatalogService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(HiveCatalogService.class);
 
     private static final ConcurrentHashMap<String, HCatClient> CACHE = new ConcurrentHashMap<String, HCatClient>();
 
@@ -61,7 +62,7 @@ public class HiveCatalogService extends AbstractCatalogService {
 
         if (!CACHE.containsKey(metastoreUrl)) {
             HCatClient hCatClient = getHCatClient(metastoreUrl);
-            LOG.info("Caching HCatalog client object for " + metastoreUrl);
+            LOG.info("Caching HCatalog client object for {}", metastoreUrl);
             CACHE.putIfAbsent(metastoreUrl, hCatClient);
         }
 
@@ -101,7 +102,7 @@ public class HiveCatalogService extends AbstractCatalogService {
                     hcatConf.set(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname, "true");
                 }
 
-                LOG.info("Creating and caching HCatalog client object for " + catalogUrl);
+                LOG.info("Creating and caching HCatalog client object for {}", catalogUrl);
                 UserGroupInformation currentUser = UserGroupInformation.getLoginUser();
                 HCatClient hcatClient = currentUser.doAs(new PrivilegedExceptionAction<HCatClient>() {
                     public HCatClient run() throws Exception {
@@ -122,7 +123,7 @@ public class HiveCatalogService extends AbstractCatalogService {
     @Override
     public boolean isAlive(final String catalogUrl,
                            final String metaStorePrincipal) throws FalconException {
-        LOG.info("Checking if the service is alive for: " + catalogUrl);
+        LOG.info("Checking if the service is alive for: {}", catalogUrl);
 
         try {
             HCatClient client = getProxiedClient(catalogUrl, metaStorePrincipal);
@@ -136,7 +137,7 @@ public class HiveCatalogService extends AbstractCatalogService {
     @Override
     public boolean tableExists(final String catalogUrl, final String database, final String tableName,
                                final String metaStorePrincipal) throws FalconException {
-        LOG.info("Checking if the table exists: " + tableName);
+        LOG.info("Checking if the table exists: {}", tableName);
 
         try {
             HCatClient client = getProxiedClient(catalogUrl, metaStorePrincipal);
@@ -150,7 +151,7 @@ public class HiveCatalogService extends AbstractCatalogService {
     @Override
     public boolean isTableExternal(String catalogUrl, String database, String tableName)
         throws FalconException {
-        LOG.info("Checking if the table is external:" + tableName);
+        LOG.info("Checking if the table is external: {}", tableName);
 
         try {
             HCatClient client = get(catalogUrl);
@@ -165,7 +166,7 @@ public class HiveCatalogService extends AbstractCatalogService {
     public List<CatalogPartition> listPartitionsByFilter(String catalogUrl, String database,
                                                          String tableName, String filter)
         throws FalconException {
-        LOG.info("List partitions for : " + tableName + ", partition filter: " + filter);
+        LOG.info("List partitions for: {}, partition filter: {}", tableName, filter);
 
         try {
             List<CatalogPartition> catalogPartitionList = new ArrayList<CatalogPartition>();
@@ -209,7 +210,7 @@ public class HiveCatalogService extends AbstractCatalogService {
     public boolean dropPartitions(String catalogUrl, String database,
                                   String tableName, Map<String, String> partitions)
         throws FalconException {
-        LOG.info("Dropping partitions for : " + tableName + ", partitions: " + partitions);
+        LOG.info("Dropping partitions for: {}, partitions: {}", tableName, partitions);
 
         try {
             HCatClient client = get(catalogUrl);
@@ -224,7 +225,7 @@ public class HiveCatalogService extends AbstractCatalogService {
     @Override
     public CatalogPartition getPartition(String catalogUrl, String database, String tableName,
                                          Map<String, String> partitionSpec) throws FalconException {
-        LOG.info("Fetch partition for : " + tableName + ", partition spec: " + partitionSpec);
+        LOG.info("Fetch partition for: {}, partition spec: {}", tableName, partitionSpec);
 
         try {
             HCatClient client = get(catalogUrl);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java b/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java
index 20d46c3..ab85ae0 100644
--- a/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java
+++ b/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java
@@ -32,7 +32,8 @@ import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.jsp.el.ELException;
 import javax.servlet.jsp.el.ExpressionEvaluator;
@@ -44,7 +45,7 @@ import java.io.IOException;
  */
 public abstract class AbstractCleanupHandler {
 
-    protected static final Logger LOG = Logger.getLogger(AbstractCleanupHandler.class);
+    protected static final Logger LOG = LoggerFactory.getLogger(AbstractCleanupHandler.class);
 
     protected static final ConfigurationStore STORE = ConfigurationStore.get();
     public static final ExpressionEvaluator EVALUATOR = new ExpressionEvaluatorImpl();
@@ -99,7 +100,7 @@ public abstract class AbstractCleanupHandler {
     protected void delete(Cluster cluster, Entity entity, long retention, FileStatus[] logs)
         throws FalconException {
         if (logs == null || logs.length == 0) {
-            LOG.info("Nothing to delete for cluster: " + cluster.getName() + ", entity: " + entity.getName());
+            LOG.info("Nothing to delete for cluster: {}, entity: {}", cluster.getName(), entity.getName());
             return;
         }
 
@@ -110,9 +111,9 @@ public abstract class AbstractCleanupHandler {
                 try {
                     boolean isDeleted = getFileSystem(cluster).delete(log.getPath(), true);
                     if (!isDeleted) {
-                        LOG.error("Unable to delete path: " + log.getPath());
+                        LOG.error("Unable to delete path: {}", log.getPath());
                     } else {
-                        LOG.info("Deleted path: " + log.getPath());
+                        LOG.info("Deleted path: {}", log.getPath());
                     }
                     deleteParentIfEmpty(getFileSystem(cluster), log.getPath().getParent());
                 } catch (IOException e) {
@@ -121,10 +122,8 @@ public abstract class AbstractCleanupHandler {
                             + " for cluster: " + cluster.getName(), e);
                 }
             } else {
-                LOG.info("Retention limit: " + retention
-                        + " is less than modification"
-                        + (now - log.getModificationTime()) + " for path: "
-                        + log.getPath());
+                LOG.info("Retention limit: {} is less than modification {} for path: {}", retention,
+                        (now - log.getModificationTime()), log.getPath());
             }
         }
     }
@@ -132,7 +131,7 @@ public abstract class AbstractCleanupHandler {
     private void deleteParentIfEmpty(FileSystem fs, Path parent) throws IOException {
         FileStatus[] files = fs.listStatus(parent);
         if (files != null && files.length == 0) {
-            LOG.info("Parent path: " + parent + " is empty, deleting path");
+            LOG.info("Parent path: {} is empty, deleting path", parent);
             fs.delete(parent, true);
             deleteParentIfEmpty(fs, parent.getParent());
         }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java b/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java
index ce96eb3..452ab02 100644
--- a/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java
+++ b/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java
@@ -51,13 +51,13 @@ public class FeedCleanupHandler extends AbstractCleanupHandler {
                 Cluster currentCluster = STORE.get(EntityType.CLUSTER,
                         cluster.getName());
                 if (currentCluster.getColo().equals(getCurrentColo())) {
-                    LOG.info("Cleaning up logs & staged data for feed:" + feedName
-                            + " in  cluster: " + cluster.getName() + " with retention: " + retention);
+                    LOG.info("Cleaning up logs & staged data for feed: {} in cluster: {} with retention: {}", feedName,
+                            cluster.getName(), retention);
                     delete(currentCluster, feed, retention);
                     deleteStagedData(currentCluster, feed, retention);
                 } else {
-                    LOG.info("Ignoring cleanup for feed:" + feedName
-                            + " in  cluster: " + cluster.getName() + " as this does not belong to current colo");
+                    LOG.info("Ignoring cleanup for feed: {} in cluster: {} as this does not belong to current colo",
+                            feedName, cluster.getName());
                 }
             }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/cleanup/ProcessCleanupHandler.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/cleanup/ProcessCleanupHandler.java b/common/src/main/java/org/apache/falcon/cleanup/ProcessCleanupHandler.java
index add1237..e6ce72f 100644
--- a/common/src/main/java/org/apache/falcon/cleanup/ProcessCleanupHandler.java
+++ b/common/src/main/java/org/apache/falcon/cleanup/ProcessCleanupHandler.java
@@ -44,12 +44,12 @@ public class ProcessCleanupHandler extends AbstractCleanupHandler {
                 Cluster currentCluster = STORE.get(EntityType.CLUSTER,
                         cluster.getName());
                 if (currentCluster.getColo().equals(getCurrentColo())) {
-                    LOG.info("Cleaning up logs for process:" + processName
-                            + " in  cluster: " + cluster.getName() + " with retention: " + retention);
+                    LOG.info("Cleaning up logs for process: {} in cluster: {} with retention: {}",
+                            processName, cluster.getName(), retention);
                     delete(currentCluster, process, retention);
                 } else {
-                    LOG.info("Ignoring cleanup for process:" + processName
-                            + " in  cluster: " + cluster.getName() + " as this does not belong to current colo");
+                    LOG.info("Ignoring cleanup for process: {} in cluster: {} as this does not belong to current colo",
+                            processName, cluster.getName());
                 }
             }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java b/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java
index 831bfdc..584b867 100644
--- a/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java
+++ b/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java
@@ -39,14 +39,15 @@ import org.apache.falcon.workflow.WorkflowEngineFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Parser that parses cluster entity definition.
  */
 public class ClusterEntityParser extends EntityParser<Cluster> {
 
-    private static final Logger LOG = Logger.getLogger(ProcessEntityParser.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ProcessEntityParser.class);
 
     public ClusterEntityParser() {
         super(EntityType.CLUSTER);
@@ -87,14 +88,14 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
 
     private void validateReadInterface(Cluster cluster) throws ValidationException {
         final String readOnlyStorageUrl = ClusterHelper.getReadOnlyStorageUrl(cluster);
-        LOG.info("Validating read interface: " + readOnlyStorageUrl);
+        LOG.info("Validating read interface: {}", readOnlyStorageUrl);
 
         validateFileSystem(cluster, readOnlyStorageUrl);
     }
 
     private void validateWriteInterface(Cluster cluster) throws ValidationException {
         final String writeStorageUrl = ClusterHelper.getStorageUrl(cluster);
-        LOG.info("Validating write interface: " + writeStorageUrl);
+        LOG.info("Validating write interface: {}", writeStorageUrl);
 
         validateFileSystem(cluster, writeStorageUrl);
     }
@@ -123,7 +124,7 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
 
     private void validateExecuteInterface(Cluster cluster) throws ValidationException {
         String executeUrl = ClusterHelper.getMREndPoint(cluster);
-        LOG.info("Validating execute interface: " + executeUrl);
+        LOG.info("Validating execute interface: {}", executeUrl);
 
         try {
             HadoopClientFactory.validateJobClient(executeUrl);
@@ -134,7 +135,7 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
 
     private void validateWorkflowInterface(Cluster cluster) throws ValidationException {
         final String workflowUrl = ClusterHelper.getOozieUrl(cluster);
-        LOG.info("Validating workflow interface: " + workflowUrl);
+        LOG.info("Validating workflow interface: {}", workflowUrl);
 
         try {
             if (!WorkflowEngineFactory.getWorkflowEngine().isAlive(cluster)) {
@@ -149,7 +150,7 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
         final String messagingUrl = ClusterHelper.getMessageBrokerUrl(cluster);
         final String implementation = StartupProperties.get().getProperty(
                 "broker.impl.class", "org.apache.activemq.ActiveMQConnectionFactory");
-        LOG.info("Validating messaging interface: " + messagingUrl + ", implementation: " + implementation);
+        LOG.info("Validating messaging interface: {}, implementation: {}", messagingUrl, implementation);
 
         try {
             @SuppressWarnings("unchecked")
@@ -173,12 +174,12 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
         // continue validation only if a catalog service is provided
         final Interface catalogInterface = ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY);
         if (catalogInterface == null) {
-            LOG.info("Catalog service is not enabled for cluster: " + cluster.getName());
+            LOG.info("Catalog service is not enabled for cluster: {}", cluster.getName());
             return;
         }
 
         final String catalogUrl = catalogInterface.getEndpoint();
-        LOG.info("Validating catalog registry interface: " + catalogUrl);
+        LOG.info("Validating catalog registry interface: {}", catalogUrl);
 
         try {
             String metaStorePrincipal = null;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/entity/parser/EntityParser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/parser/EntityParser.java b/common/src/main/java/org/apache/falcon/entity/parser/EntityParser.java
index 0df831d..8da5139 100644
--- a/common/src/main/java/org/apache/falcon/entity/parser/EntityParser.java
+++ b/common/src/main/java/org/apache/falcon/entity/parser/EntityParser.java
@@ -24,7 +24,8 @@ import org.apache.falcon.Pair;
 import org.apache.falcon.entity.store.ConfigurationStore;
 import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.xml.bind.Unmarshaller;
 import java.io.ByteArrayInputStream;
@@ -39,7 +40,7 @@ import java.util.List;
  */
 public abstract class EntityParser<T extends Entity> {
 
-    private static final Logger LOG = Logger.getLogger(EntityParser.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EntityParser.class);
 
     private final EntityType entityType;
 
@@ -80,7 +81,7 @@ public abstract class EntityParser<T extends Entity> {
             // parse against schema
             Unmarshaller unmarshaller = entityType.getUnmarshaller();
             T entity = (T) unmarshaller.unmarshal(xmlStream);
-            LOG.info("Parsed Entity: " + entity.getName());
+            LOG.info("Parsed Entity: {}", entity.getName());
             return entity;
         } catch (Exception e) {
             throw new FalconException(e);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java b/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
index 8911b33..ccdead9 100644
--- a/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
+++ b/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
@@ -41,7 +41,8 @@ import org.apache.falcon.expression.ExpressionHelper;
 import org.apache.falcon.group.FeedGroup;
 import org.apache.falcon.group.FeedGroupMap;
 import org.apache.falcon.security.SecurityUtil;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Date;
 import java.util.HashSet;
@@ -53,7 +54,7 @@ import java.util.TimeZone;
  */
 public class FeedEntityParser extends EntityParser<Feed> {
 
-    private static final Logger LOG = Logger.getLogger(FeedEntityParser.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FeedEntityParser.class);
 
     public FeedEntityParser() {
         super(EntityType.FEED);
@@ -178,8 +179,8 @@ public class FeedEntityParser extends EntityParser<Feed> {
                     CrossEntityValidations.validateInstance(process, output, newFeed);
                 }
             }
-            LOG.debug("Verified and found " + process.getName() + " to be valid for new definition of "
-                    + newFeed.getName());
+            LOG.debug("Verified and found {} to be valid for new definition of {}",
+                    process.getName(), newFeed.getName());
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java b/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java
index c2f1d44..0534cc4 100644
--- a/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java
+++ b/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java
@@ -31,7 +31,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.xml.bind.JAXBException;
 import java.io.IOException;
@@ -52,8 +53,8 @@ import java.util.concurrent.ConcurrentHashMap;
  */
 public final class ConfigurationStore implements FalconService {
 
-    private static final Logger LOG = Logger.getLogger(ConfigurationStore.class);
-    private static final Logger AUDIT = Logger.getLogger("AUDIT");
+    private static final Logger LOG = LoggerFactory.getLogger(ConfigurationStore.class);
+    private static final Logger AUDIT = LoggerFactory.getLogger("AUDIT");
     private static final String UTF_8 = "UTF-8";
 
     private static final ConfigurationStore STORE = new ConfigurationStore();
@@ -98,7 +99,7 @@ public final class ConfigurationStore implements FalconService {
         try {
             FileSystem fileSystem = HadoopClientFactory.get().createFileSystem(storePath.toUri());
             if (!fileSystem.exists(storePath)) {
-                LOG.info("Creating configuration store directory: " + storePath);
+                LOG.info("Creating configuration store directory: {}", storePath);
                 fileSystem.mkdirs(storePath);
                 // set permissions so config store dir is owned by falcon alone
                 FsPermission permission = new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);
@@ -311,9 +312,9 @@ public final class ConfigurationStore implements FalconService {
                         type + Path.SEPARATOR + URLEncoder.encode(entity.getName(), UTF_8) + ".xml"));
         try {
             type.getMarshaller().marshal(entity, out);
-            LOG.info("Persisted configuration " + type + "/" + entity.getName());
+            LOG.info("Persisted configuration {}/{}", type, entity.getName());
         } catch (JAXBException e) {
-            LOG.error(e);
+            LOG.error("Unable to serialize the entity object {}/{}", type, entity.getName(), e);
             throw new StoreAccessException("Unable to serialize the entity object " + type + "/" + entity.getName(), e);
         } finally {
             out.close();
@@ -332,7 +333,7 @@ public final class ConfigurationStore implements FalconService {
         fs.mkdirs(archivePath);
         fs.rename(new Path(storePath, type + Path.SEPARATOR + URLEncoder.encode(name, UTF_8) + ".xml"),
                 new Path(archivePath, URLEncoder.encode(name, UTF_8) + "." + System.currentTimeMillis()));
-        LOG.info("Archived configuration " + type + "/" + name);
+        LOG.info("Archived configuration {}/{}", type, name);
     }
 
     /**
@@ -354,7 +355,7 @@ public final class ConfigurationStore implements FalconService {
             throw new StoreAccessException("Unable to un-marshall xml definition for " + type + "/" + name, e);
         } finally {
             in.close();
-            LOG.info("Restored configuration " + type + "/" + name);
+            LOG.info("Restored configuration {}/{}", type, name);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/entity/v0/EntityGraph.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/v0/EntityGraph.java b/common/src/main/java/org/apache/falcon/entity/v0/EntityGraph.java
index 75cab5d..444e28d 100644
--- a/common/src/main/java/org/apache/falcon/entity/v0/EntityGraph.java
+++ b/common/src/main/java/org/apache/falcon/entity/v0/EntityGraph.java
@@ -26,7 +26,8 @@ import org.apache.falcon.entity.v0.process.Input;
 import org.apache.falcon.entity.v0.process.Output;
 import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.service.ConfigurationChangeListener;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.HashMap;
 import java.util.HashSet;
@@ -39,7 +40,7 @@ import java.util.concurrent.ConcurrentHashMap;
  */
 public final class EntityGraph implements ConfigurationChangeListener {
 
-    private static final Logger LOG = Logger.getLogger(EntityGraph.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EntityGraph.class);
 
     private static EntityGraph instance = new EntityGraph();
 
@@ -83,7 +84,7 @@ public final class EntityGraph implements ConfigurationChangeListener {
         if (nodeEdges == null) {
             return;
         }
-        LOG.trace("Adding edges for " + entity.getName() + ": " + nodeEdges);
+        LOG.trace("Adding edges for {}: {}", entity.getName(), nodeEdges);
 
         for (Map.Entry<Node, Set<Node>> entry : nodeEdges.entrySet()) {
             if (graph.containsKey(entry.getKey())) {
@@ -92,7 +93,7 @@ public final class EntityGraph implements ConfigurationChangeListener {
                 graph.put(entry.getKey(), entry.getValue());
             }
         }
-        LOG.trace("Merged edges to graph " + entity.getName());
+        LOG.trace("Merged edges to graph {}", entity.getName());
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/metadata/EntityRelationshipGraphBuilder.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/metadata/EntityRelationshipGraphBuilder.java b/common/src/main/java/org/apache/falcon/metadata/EntityRelationshipGraphBuilder.java
index 2d3ec95..2f46ff4 100644
--- a/common/src/main/java/org/apache/falcon/metadata/EntityRelationshipGraphBuilder.java
+++ b/common/src/main/java/org/apache/falcon/metadata/EntityRelationshipGraphBuilder.java
@@ -29,7 +29,8 @@ import org.apache.falcon.entity.v0.process.Output;
 import org.apache.falcon.entity.v0.process.Outputs;
 import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.entity.v0.process.Workflow;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -39,7 +40,7 @@ import java.util.List;
  */
 public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
 
-    private static final Logger LOG = Logger.getLogger(EntityRelationshipGraphBuilder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EntityRelationshipGraphBuilder.class);
 
 
     public EntityRelationshipGraphBuilder(Graph graph, boolean preserveHistory) {
@@ -47,7 +48,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
     }
 
     public void addClusterEntity(Cluster clusterEntity) {
-        LOG.info("Adding cluster entity: " + clusterEntity.getName());
+        LOG.info("Adding cluster entity: {}", clusterEntity.getName());
         Vertex clusterVertex = addVertex(clusterEntity.getName(), RelationshipType.CLUSTER_ENTITY);
 
         addColoRelation(clusterEntity.getColo(), clusterVertex);
@@ -55,7 +56,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
     }
 
     public void addFeedEntity(Feed feed) {
-        LOG.info("Adding feed entity: " + feed.getName());
+        LOG.info("Adding feed entity: {}", feed.getName());
         Vertex feedVertex = addVertex(feed.getName(), RelationshipType.FEED_ENTITY);
 
         addUserRelation(feedVertex);
@@ -68,11 +69,11 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
     }
 
     public void updateFeedEntity(Feed oldFeed, Feed newFeed) {
-        LOG.info("Updating feed entity: " + newFeed.getName());
+        LOG.info("Updating feed entity: {}", newFeed.getName());
         Vertex feedEntityVertex = findVertex(oldFeed.getName(), RelationshipType.FEED_ENTITY);
         if (feedEntityVertex == null) {
             // todo - throw new IllegalStateException(oldFeed.getName() + " entity vertex must exist.");
-            LOG.error("Illegal State: Feed entity vertex must exist for " + oldFeed.getName());
+            LOG.error("Illegal State: Feed entity vertex must exist for {}", oldFeed.getName());
             return;
         }
 
@@ -84,7 +85,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
 
     public void addProcessEntity(Process process) {
         String processName = process.getName();
-        LOG.info("Adding process entity: " + processName);
+        LOG.info("Adding process entity: {}", processName);
         Vertex processVertex = addVertex(processName, RelationshipType.PROCESS_ENTITY);
         addWorkflowProperties(process.getWorkflow(), processVertex, processName);
 
@@ -100,11 +101,11 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
     }
 
     public void updateProcessEntity(Process oldProcess, Process newProcess) {
-        LOG.info("Updating process entity: " + newProcess.getName());
+        LOG.info("Updating process entity: {}", newProcess.getName());
         Vertex processEntityVertex = findVertex(oldProcess.getName(), RelationshipType.PROCESS_ENTITY);
         if (processEntityVertex == null) {
             // todo - throw new IllegalStateException(oldProcess.getName() + " entity vertex must exist");
-            LOG.error("Illegal State: Process entity vertex must exist for " + oldProcess.getName());
+            LOG.error("Illegal State: Process entity vertex must exist for {}", oldProcess.getName());
             return;
         }
 
@@ -126,7 +127,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
         Vertex clusterVertex = findVertex(clusterName, RelationshipType.CLUSTER_ENTITY);
         if (clusterVertex == null) { // cluster must exist before adding other entities
             // todo - throw new IllegalStateException("Cluster entity vertex must exist: " + clusterName);
-            LOG.error("Illegal State: Cluster entity vertex must exist for " + clusterName);
+            LOG.error("Illegal State: Cluster entity vertex must exist for {}", clusterName);
             return;
         }
 
@@ -157,7 +158,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
         Vertex feedVertex = findVertex(feedName, RelationshipType.FEED_ENTITY);
         if (feedVertex == null) {
             // todo - throw new IllegalStateException("Feed entity vertex must exist: " + feedName);
-            LOG.error("Illegal State: Feed entity vertex must exist for " + feedName);
+            LOG.error("Illegal State: Feed entity vertex must exist for {}", feedName);
             return;
         }
 
@@ -177,7 +178,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
             return;
         }
 
-        LOG.info("Updating workflow properties for: " + processEntityVertex);
+        LOG.info("Updating workflow properties for: {}", processEntityVertex);
         addWorkflowProperties(newWorkflow, processEntityVertex, processName);
     }
 
@@ -372,7 +373,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
         Vertex feedVertex = findVertex(feedName, RelationshipType.FEED_ENTITY);
         if (feedVertex == null) {
             // todo - throw new IllegalStateException("Feed entity vertex must exist: " + feedName);
-            LOG.error("Illegal State: Feed entity vertex must exist for " + feedName);
+            LOG.error("Illegal State: Feed entity vertex must exist for {}", feedName);
             return;
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/metadata/GraphUtils.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/metadata/GraphUtils.java b/common/src/main/java/org/apache/falcon/metadata/GraphUtils.java
index 24bf30f..8bec02f 100644
--- a/common/src/main/java/org/apache/falcon/metadata/GraphUtils.java
+++ b/common/src/main/java/org/apache/falcon/metadata/GraphUtils.java
@@ -23,7 +23,8 @@ import com.tinkerpop.blueprints.Edge;
 import com.tinkerpop.blueprints.Graph;
 import com.tinkerpop.blueprints.Vertex;
 import com.tinkerpop.blueprints.util.io.graphson.GraphSONWriter;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.io.OutputStream;
@@ -33,18 +34,18 @@ import java.io.OutputStream;
  */
 public final class GraphUtils {
 
-    private static final Logger LOG = Logger.getLogger(GraphUtils.class);
+    private static final Logger LOG = LoggerFactory.getLogger(GraphUtils.class);
 
     private GraphUtils() {
     }
 
     public static void dumpToLog(final Graph graph) {
-        LOG.debug("Vertices of " + graph);
+        LOG.debug("Vertices of {}", graph);
         for (Vertex vertex : graph.getVertices()) {
             LOG.debug(vertexString(vertex));
         }
 
-        LOG.debug("Edges of " + graph);
+        LOG.debug("Edges of {}", graph);
         for (Edge edge : graph.getEdges()) {
             LOG.debug(edgeString(edge));
         }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/metadata/InstanceRelationshipGraphBuilder.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/metadata/InstanceRelationshipGraphBuilder.java b/common/src/main/java/org/apache/falcon/metadata/InstanceRelationshipGraphBuilder.java
index eb591c0..0bbfd08 100644
--- a/common/src/main/java/org/apache/falcon/metadata/InstanceRelationshipGraphBuilder.java
+++ b/common/src/main/java/org/apache/falcon/metadata/InstanceRelationshipGraphBuilder.java
@@ -32,7 +32,8 @@ import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.feed.LocationType;
 import org.apache.falcon.entity.v0.process.Process;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.net.URISyntaxException;
 import java.util.Map;
@@ -42,7 +43,7 @@ import java.util.Map;
  */
 public class InstanceRelationshipGraphBuilder extends RelationshipGraphBuilder {
 
-    private static final Logger LOG = Logger.getLogger(InstanceRelationshipGraphBuilder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(InstanceRelationshipGraphBuilder.class);
 
     private static final String PROCESS_INSTANCE_FORMAT = "yyyy-MM-dd-HH-mm"; // nominal time
     private static final String FEED_INSTANCE_FORMAT = "yyyyMMddHHmm"; // computed
@@ -67,7 +68,7 @@ public class InstanceRelationshipGraphBuilder extends RelationshipGraphBuilder {
         String entityName = lineageMetadata.get(LineageArgs.ENTITY_NAME.getOptionName());
         String processInstanceName = getProcessInstanceName(entityName,
                 lineageMetadata.get(LineageArgs.NOMINAL_TIME.getOptionName()));
-        LOG.info("Adding process instance: " + processInstanceName);
+        LOG.info("Adding process instance: {}", processInstanceName);
 
         String timestamp = getTimestamp(lineageMetadata);
         Vertex processInstance = addVertex(processInstanceName, RelationshipType.PROCESS_INSTANCE, timestamp);
@@ -111,10 +112,10 @@ public class InstanceRelationshipGraphBuilder extends RelationshipGraphBuilder {
     public void addInstanceToEntity(Vertex instanceVertex, String entityName,
                                     RelationshipType entityType, RelationshipLabel edgeLabel) {
         Vertex entityVertex = findVertex(entityName, entityType);
-        LOG.info("Vertex exists? name=" + entityName + ", type=" + entityType + ", v=" + entityVertex);
+        LOG.info("Vertex exists? name={}, type={}, v={}", entityName, entityType, entityVertex);
         if (entityVertex == null) {
             // todo - throw new IllegalStateException(entityType + " entity vertex must exist " + entityName);
-            LOG.error("Illegal State: " + entityType + " vertex must exist for " + entityName);
+            LOG.error("Illegal State: {} vertex must exist for {}", entityType, entityName);
             return;
         }
 
@@ -161,10 +162,10 @@ public class InstanceRelationshipGraphBuilder extends RelationshipGraphBuilder {
             String feedName = feedNames[index];
             String feedInstancePath = feedInstancePaths[index];
 
-            LOG.info("Computing feed instance for : name=" + feedName + ", path= "
-                    + feedInstancePath + ", in cluster: " + clusterName);
+            LOG.info("Computing feed instance for : name={}, path={}, in cluster: {}",
+                    feedName, feedInstancePath, clusterName);
             String feedInstanceName = getFeedInstanceName(feedName, clusterName, feedInstancePath);
-            LOG.info("Adding feed instance: " + feedInstanceName);
+            LOG.info("Adding feed instance: {}", feedInstanceName);
             Vertex feedInstance = addVertex(feedInstanceName, RelationshipType.FEED_INSTANCE,
                     getTimestamp(lineageMetadata));
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/metadata/LineageRecorder.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/metadata/LineageRecorder.java b/common/src/main/java/org/apache/falcon/metadata/LineageRecorder.java
index 9f6965a..8a946ad 100644
--- a/common/src/main/java/org/apache/falcon/metadata/LineageRecorder.java
+++ b/common/src/main/java/org/apache/falcon/metadata/LineageRecorder.java
@@ -32,8 +32,9 @@ import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Logger;
 import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -47,7 +48,7 @@ import java.util.Map;
  */
 public class LineageRecorder  extends Configured implements Tool {
 
-    private static final Logger LOG = Logger.getLogger(LineageRecorder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LineageRecorder.class);
 
     public static void main(String[] args) throws Exception {
         ToolRunner.run(new LineageRecorder(), args);
@@ -57,15 +58,15 @@ public class LineageRecorder  extends Configured implements Tool {
     public int run(String[] arguments) throws Exception {
         CommandLine command = getCommand(arguments);
 
-        LOG.info("Parsing lineage metadata from: " + command);
+        LOG.info("Parsing lineage metadata from: {}", command);
         Map<String, String> lineageMetadata = getLineageMetadata(command);
-        LOG.info("Lineage Metadata: " + lineageMetadata);
+        LOG.info("Lineage Metadata: {}", lineageMetadata);
 
         String lineageFile = getFilePath(command.getOptionValue(LineageArgs.LOG_DIR.getOptionName()),
                 command.getOptionValue(LineageArgs.ENTITY_NAME.getOptionName())
         );
 
-        LOG.info("Persisting lineage metadata to: " + lineageFile);
+        LOG.info("Persisting lineage metadata to: {}", lineageFile);
         persistLineageMetadata(lineageMetadata, lineageFile);
 
         return 0;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/metadata/MetadataMappingService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/metadata/MetadataMappingService.java b/common/src/main/java/org/apache/falcon/metadata/MetadataMappingService.java
index 21c22de..5df4611 100644
--- a/common/src/main/java/org/apache/falcon/metadata/MetadataMappingService.java
+++ b/common/src/main/java/org/apache/falcon/metadata/MetadataMappingService.java
@@ -37,7 +37,8 @@ import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.service.ConfigurationChangeListener;
 import org.apache.falcon.service.FalconService;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Map;
 import java.util.Properties;
@@ -48,7 +49,7 @@ import java.util.Set;
  */
 public class MetadataMappingService implements FalconService, ConfigurationChangeListener {
 
-    private static final Logger LOG = Logger.getLogger(MetadataMappingService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(MetadataMappingService.class);
 
     /**
      * Constance for the service name.
@@ -77,13 +78,13 @@ public class MetadataMappingService implements FalconService, ConfigurationChang
         graph = initializeGraphDB();
         createIndicesForVertexKeys();
         // todo - create Edge Cardinality Constraints
-        LOG.info("Initialized graph db: " + graph);
+        LOG.info("Initialized graph db: {}", graph);
 
         vertexIndexedKeys = getIndexableGraph().getIndexedKeys(Vertex.class);
-        LOG.info("Init vertex property keys: " + vertexIndexedKeys);
+        LOG.info("Init vertex property keys: {}", vertexIndexedKeys);
 
         edgeIndexedKeys = getIndexableGraph().getIndexedKeys(Edge.class);
-        LOG.info("Init edge property keys: " + edgeIndexedKeys);
+        LOG.info("Init edge property keys: {}", edgeIndexedKeys);
 
         boolean preserveHistory = Boolean.valueOf(StartupProperties.get().getProperty(
                 "falcon.graph.preserve.history", "false"));
@@ -185,7 +186,7 @@ public class MetadataMappingService implements FalconService, ConfigurationChang
     @Override
     public void onAdd(Entity entity) throws FalconException {
         EntityType entityType = entity.getEntityType();
-        LOG.info("Adding lineage for entity: " + entity.getName() + ", type: " + entityType);
+        LOG.info("Adding lineage for entity: {}, type: {}", entity.getName(), entityType);
 
         switch (entityType) {
         case CLUSTER:
@@ -216,7 +217,7 @@ public class MetadataMappingService implements FalconService, ConfigurationChang
     @Override
     public void onChange(Entity oldEntity, Entity newEntity) throws FalconException {
         EntityType entityType = newEntity.getEntityType();
-        LOG.info("Updating lineage for entity: " + newEntity.getName() + ", type: " + entityType);
+        LOG.info("Updating lineage for entity: {}, type: {}", newEntity.getName(), entityType);
 
         switch (entityType) {
         case CLUSTER:
@@ -254,12 +255,12 @@ public class MetadataMappingService implements FalconService, ConfigurationChang
                                                String logDir) throws FalconException {
         String lineageFile = LineageRecorder.getFilePath(logDir, entityName);
 
-        LOG.info("Parsing lineage metadata from: " + lineageFile);
+        LOG.info("Parsing lineage metadata from: {}", lineageFile);
         Map<String, String> lineageMetadata = LineageRecorder.parseLineageMetadata(lineageFile);
 
         EntityOperations entityOperation = EntityOperations.valueOf(operation);
 
-        LOG.info("Adding lineage for entity: " + entityName + ", operation: " + operation);
+        LOG.info("Adding lineage for entity: {}, operation: {}", entityName, operation);
         switch (entityOperation) {
         case GENERATE:
             onProcessInstanceAdded(lineageMetadata);
@@ -285,12 +286,12 @@ public class MetadataMappingService implements FalconService, ConfigurationChang
     }
 
     private void onFeedInstanceReplicated(Map<String, String> lineageMetadata) {
-        LOG.info("Adding replicated feed instance: " + lineageMetadata.get(LineageArgs.NOMINAL_TIME.getOptionName()));
+        LOG.info("Adding replicated feed instance: {}", lineageMetadata.get(LineageArgs.NOMINAL_TIME.getOptionName()));
         // todo - tbd
     }
 
     private void onFeedInstanceEvicted(Map<String, String> lineageMetadata) {
-        LOG.info("Adding evicted feed instance: " + lineageMetadata.get(LineageArgs.NOMINAL_TIME.getOptionName()));
+        LOG.info("Adding evicted feed instance: {}", lineageMetadata.get(LineageArgs.NOMINAL_TIME.getOptionName()));
         // todo - tbd
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/metadata/RelationshipGraphBuilder.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/metadata/RelationshipGraphBuilder.java b/common/src/main/java/org/apache/falcon/metadata/RelationshipGraphBuilder.java
index 7baeeec..9ee0ea6 100644
--- a/common/src/main/java/org/apache/falcon/metadata/RelationshipGraphBuilder.java
+++ b/common/src/main/java/org/apache/falcon/metadata/RelationshipGraphBuilder.java
@@ -25,7 +25,8 @@ import com.tinkerpop.blueprints.GraphQuery;
 import com.tinkerpop.blueprints.Vertex;
 import org.apache.falcon.entity.v0.SchemaHelper;
 import org.apache.falcon.security.CurrentUser;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Date;
 import java.util.Iterator;
@@ -36,7 +37,7 @@ import java.util.Map;
  */
 public abstract class RelationshipGraphBuilder {
 
-    private static final Logger LOG = Logger.getLogger(RelationshipGraphBuilder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(RelationshipGraphBuilder.class);
 
     /**
      * A blueprints graph.
@@ -65,10 +66,7 @@ public abstract class RelationshipGraphBuilder {
     public Vertex addVertex(String name, RelationshipType type) {
         Vertex vertex = findVertex(name, type);
         if (vertex != null) {
-            if (LOG.isDebugEnabled()) {
-                LOG.debug("Found an existing vertex for: name=" + name + ", type=" + type);
-            }
-
+            LOG.debug("Found an existing vertex for: name={}, type={}", name, type);
             return vertex;
         }
 
@@ -78,10 +76,7 @@ public abstract class RelationshipGraphBuilder {
     protected Vertex addVertex(String name, RelationshipType type, String timestamp) {
         Vertex vertex = findVertex(name, type);
         if (vertex != null) {
-            if (LOG.isDebugEnabled()) {
-                LOG.debug("Found an existing vertex for: name=" + name + ", type=" + type);
-            }
-
+            LOG.debug("Found an existing vertex for: name={}, type={}", name, type);
             return vertex;
         }
 
@@ -89,9 +84,7 @@ public abstract class RelationshipGraphBuilder {
     }
 
     protected Vertex findVertex(String name, RelationshipType type) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("Finding vertex for: name=" + name + ", type=" + type);
-        }
+        LOG.debug("Finding vertex for: name={}, type={}", name, type);
 
         GraphQuery query = graph.query()
                 .has(RelationshipProperty.NAME.getName(), name)
@@ -105,9 +98,7 @@ public abstract class RelationshipGraphBuilder {
     }
 
     protected Vertex createVertex(String name, RelationshipType type, String timestamp) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("Creating a new vertex for: name=" + name + ", type=" + type);
-        }
+        LOG.debug("Creating a new vertex for: name={}, type={}", name, type);
 
         Vertex vertex = graph.addVertex(null);
         vertex.setProperty(RelationshipProperty.NAME.getName(), name);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java b/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java
index 264d5b8..fbed283 100644
--- a/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java
+++ b/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java
@@ -25,7 +25,8 @@ import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.util.Properties;
@@ -37,7 +38,7 @@ import java.util.Properties;
  */
 public class AuthenticationInitializationService implements FalconService {
 
-    private static final Logger LOG = Logger.getLogger(AuthenticationInitializationService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AuthenticationInitializationService.class);
 
     /**
      * Constant for the configuration property that indicates the prefix.
@@ -93,8 +94,7 @@ public class AuthenticationInitializationService implements FalconService {
             UserGroupInformation.setConfiguration(conf);
             UserGroupInformation.loginUserFromKeytab(principal, keytabFilePath);
 
-            LOG.info("Got Kerberos ticket, keytab: " + keytabFilePath
-                    + ", Falcon principal principal: " + principal);
+            LOG.info("Got Kerberos ticket, keytab: {}, Falcon principal: {}", keytabFilePath, principal);
         } catch (Exception ex) {
             throw new FalconException("Could not initialize " + getName()
                     + ": " + ex.getMessage(), ex);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/security/CurrentUser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/security/CurrentUser.java b/common/src/main/java/org/apache/falcon/security/CurrentUser.java
index cd7d0b0..6fccd1b 100644
--- a/common/src/main/java/org/apache/falcon/security/CurrentUser.java
+++ b/common/src/main/java/org/apache/falcon/security/CurrentUser.java
@@ -18,7 +18,8 @@
 
 package org.apache.falcon.security;
 
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.security.auth.Subject;
 
@@ -27,7 +28,7 @@ import javax.security.auth.Subject;
  */
 public final class CurrentUser {
 
-    private static final Logger LOG = Logger.getLogger(CurrentUser.class);
+    private static final Logger LOG = LoggerFactory.getLogger(CurrentUser.class);
 
     private static final CurrentUser INSTANCE = new CurrentUser();
 
@@ -49,7 +50,7 @@ public final class CurrentUser {
 
         Subject subject = new Subject();
         subject.getPrincipals().add(new FalconPrincipal(user));
-        LOG.info("Logging in " + user);
+        LOG.info("Logging in {}", user);
         INSTANCE.currentSubject.set(subject);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/service/LogCleanupService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/service/LogCleanupService.java b/common/src/main/java/org/apache/falcon/service/LogCleanupService.java
index 689c8a0..a32eaa7 100644
--- a/common/src/main/java/org/apache/falcon/service/LogCleanupService.java
+++ b/common/src/main/java/org/apache/falcon/service/LogCleanupService.java
@@ -32,14 +32,15 @@ import org.apache.falcon.cleanup.FeedCleanupHandler;
 import org.apache.falcon.cleanup.ProcessCleanupHandler;
 import org.apache.falcon.expression.ExpressionHelper;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Log cleanup service.
  */
 public class LogCleanupService implements FalconService {
 
-    private static final Logger LOG = Logger.getLogger(LogCleanupService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LogCleanupService.class);
     private final ExpressionEvaluator evaluator = new ExpressionEvaluatorImpl();
     private final ExpressionHelper resolver = ExpressionHelper.get();
 
@@ -64,7 +65,7 @@ public class LogCleanupService implements FalconService {
         @Override
         public void run() {
             try {
-                LOG.info("Cleaning up logs at: " + new Date());
+                LOG.info("Cleaning up logs at: {}", new Date());
                 processCleanupHandler.cleanup();
                 feedCleanupHandler.cleanup();
             } catch (Throwable t) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/service/ServiceInitializer.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/service/ServiceInitializer.java b/common/src/main/java/org/apache/falcon/service/ServiceInitializer.java
index 5e0256e..4708b94 100644
--- a/common/src/main/java/org/apache/falcon/service/ServiceInitializer.java
+++ b/common/src/main/java/org/apache/falcon/service/ServiceInitializer.java
@@ -21,14 +21,15 @@ package org.apache.falcon.service;
 import org.apache.falcon.FalconException;
 import org.apache.falcon.util.ReflectionUtils;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Initializer that Falcon uses at startup to bring up all the falcon startup services.
  */
 public class ServiceInitializer {
 
-    private static final Logger LOG = Logger.getLogger(ServiceInitializer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ServiceInitializer.class);
     private final Services services = Services.get();
 
     public void initialize() throws FalconException {
@@ -41,27 +42,27 @@ public class ServiceInitializer {
             }
             FalconService service = ReflectionUtils.getInstanceByClassName(serviceClassName);
             services.register(service);
-            LOG.info("Initializing service : " + serviceClassName);
+            LOG.info("Initializing service: {}", serviceClassName);
             try {
                 service.init();
             } catch (Throwable t) {
-                LOG.fatal("Failed to initialize service " + serviceClassName, t);
+                LOG.error("Failed to initialize service {}", serviceClassName, t);
                 throw new FalconException(t);
             }
-            LOG.info("Service initialized : " + serviceClassName);
+            LOG.info("Service initialized: {}", serviceClassName);
         }
     }
 
     public void destroy() throws FalconException {
         for (FalconService service : services) {
-            LOG.info("Destroying service : " + service.getClass().getName());
+            LOG.info("Destroying service: {}", service.getClass().getName());
             try {
                 service.destroy();
             } catch (Throwable t) {
-                LOG.fatal("Failed to destroy service " + service.getClass().getName(), t);
+                LOG.error("Failed to destroy service {}", service.getClass().getName(), t);
                 throw new FalconException(t);
             }
-            LOG.info("Service destroyed : " + service.getClass().getName());
+            LOG.info("Service destroyed: {}", service.getClass().getName());
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/update/UpdateHelper.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/update/UpdateHelper.java b/common/src/main/java/org/apache/falcon/update/UpdateHelper.java
index dda6bb3..7af77d0 100644
--- a/common/src/main/java/org/apache/falcon/update/UpdateHelper.java
+++ b/common/src/main/java/org/apache/falcon/update/UpdateHelper.java
@@ -36,7 +36,8 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -48,7 +49,7 @@ import java.util.Map;
  * Helper methods to facilitate entity updates.
  */
 public final class UpdateHelper {
-    private static final Logger LOG = Logger.getLogger(UpdateHelper.class);
+    private static final Logger LOG = LoggerFactory.getLogger(UpdateHelper.class);
 
     private static final String[] FEED_FIELDS = new String[]{"partitions", "groups", "lateArrival.cutOff",
                                                              "schema.location", "schema.provider",
@@ -153,7 +154,7 @@ public final class UpdateHelper {
                 if (dest != null) {
                     Path target = new Path(dest, src.getName());
                     FileUtil.copy(fs, src, fs, target, false, conf);
-                    LOG.debug("Copied " + src + " to " + target);
+                    LOG.debug("Copied {} to {}", src, target);
                 }
             } else {
                 FileStatus[] files = fs.listStatus(src);
@@ -184,17 +185,17 @@ public final class UpdateHelper {
 
             //check if affectedProcess is defined for this cluster
             if (ProcessHelper.getCluster(affectedProcess, cluster) == null) {
-                LOG.debug("Process " + affectedProcess.getName() + " is not defined for cluster " + cluster);
+                LOG.debug("Process {} is not defined for cluster {}", affectedProcess.getName(), cluster);
                 return false;
             }
 
             if (!oldFeed.getFrequency().equals(newFeed.getFrequency())) {
-                LOG.debug(oldFeed.toShortString() + ": Frequency has changed. Updating...");
+                LOG.debug("{}: Frequency has changed. Updating...", oldFeed.toShortString());
                 return true;
             }
 
             if (!StringUtils.equals(oldFeed.getAvailabilityFlag(), newFeed.getAvailabilityFlag())) {
-                LOG.debug(oldFeed.toShortString() + ": Availability flag has changed. Updating...");
+                LOG.debug("{}: Availability flag has changed. Updating...", oldFeed.toShortString());
                 return true;
             }
 
@@ -202,7 +203,7 @@ public final class UpdateHelper {
             Storage newFeedStorage = FeedHelper.createStorage(cluster, newFeed);
 
             if (!oldFeedStorage.isIdentical(newFeedStorage)) {
-                LOG.debug(oldFeed.toShortString() + ": Storage has changed. Updating...");
+                LOG.debug("{}: Storage has changed. Updating...", oldFeed.toShortString());
                 return true;
             }
             return false;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/util/ApplicationProperties.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/util/ApplicationProperties.java b/common/src/main/java/org/apache/falcon/util/ApplicationProperties.java
index d7fb10b..bd87e0d 100644
--- a/common/src/main/java/org/apache/falcon/util/ApplicationProperties.java
+++ b/common/src/main/java/org/apache/falcon/util/ApplicationProperties.java
@@ -21,7 +21,8 @@ package org.apache.falcon.util;
 import org.apache.commons.io.IOUtils;
 import org.apache.falcon.FalconException;
 import org.apache.falcon.expression.ExpressionHelper;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.FileInputStream;
@@ -38,7 +39,7 @@ import java.util.Set;
  */
 public abstract class ApplicationProperties extends Properties {
 
-    private static final Logger LOG = Logger.getLogger(ApplicationProperties.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ApplicationProperties.class);
 
     protected abstract String getPropertyFile();
 
@@ -103,7 +104,7 @@ public abstract class ApplicationProperties extends Properties {
         if (confDir != null) {
             File fileToLoad = new File(confDir, propertyFileName);
             if (fileToLoad.exists() && fileToLoad.isFile() && fileToLoad.canRead()) {
-                LOG.info("config.location is set, using: " + confDir + "/" + propertyFileName);
+                LOG.info("config.location is set, using: {}/{}", confDir, propertyFileName);
                 resourceAsStream = new FileInputStream(fileToLoad);
             }
         }
@@ -116,12 +117,12 @@ public abstract class ApplicationProperties extends Properties {
         Class clazz = ApplicationProperties.class;
         URL resource = clazz.getResource("/" + propertyFileName);
         if (resource != null) {
-            LOG.info("Fallback to classpath for: " + resource);
+            LOG.info("Fallback to classpath for: {}", resource);
             resourceAsStream = clazz.getResourceAsStream("/" + propertyFileName);
         } else {
             resource = clazz.getResource(propertyFileName);
             if (resource != null) {
-                LOG.info("Fallback to classpath for: " + resource);
+                LOG.info("Fallback to classpath for: {}", resource);
                 resourceAsStream = clazz.getResourceAsStream(propertyFileName);
             }
         }
@@ -140,13 +141,13 @@ public abstract class ApplicationProperties extends Properties {
             }
         }
 
-        LOG.info("Initializing " + this.getClass().getName() + " properties with domain " + domain);
+        LOG.info("Initializing {} properties with domain {}", this.getClass().getName(), domain);
         Set<String> keys = getKeys(origProps.keySet());
         for (String key : keys) {
             String value = origProps.getProperty(domain + "." + key, origProps.getProperty("*." + key));
             if (value != null) {
                 value = ExpressionHelper.substitute(value);
-                LOG.debug(key + "=" + value);
+                LOG.debug("{}={}", key, value);
                 put(key, value);
             }
         }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/util/DeploymentUtil.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/util/DeploymentUtil.java b/common/src/main/java/org/apache/falcon/util/DeploymentUtil.java
index eca2912..5d65073 100644
--- a/common/src/main/java/org/apache/falcon/util/DeploymentUtil.java
+++ b/common/src/main/java/org/apache/falcon/util/DeploymentUtil.java
@@ -19,7 +19,8 @@
 package org.apache.falcon.util;
 
 import org.apache.falcon.entity.ColoClusterRelation;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.HashSet;
 import java.util.Set;
@@ -28,7 +29,7 @@ import java.util.Set;
  * Helper methods to deployment properties.
  */
 public final class DeploymentUtil {
-    private static final Logger LOG = Logger.getLogger(DeploymentUtil.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DeploymentUtil.class);
 
     protected static final String DEFAULT_COLO = "default";
     protected static final String EMBEDDED = "embedded";
@@ -49,8 +50,8 @@ public final class DeploymentUtil {
             CURRENT_COLO = StartupProperties.get().
                     getProperty("current.colo", DEFAULT_COLO);
         }
-        LOG.info("Running in embedded mode? " + EMBEDDED_MODE);
-        LOG.info("Current colo: " + CURRENT_COLO);
+        LOG.info("Running in embedded mode? {}", EMBEDDED_MODE);
+        LOG.info("Current colo: {}", CURRENT_COLO);
     }
 
     private DeploymentUtil() {}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/util/RuntimeProperties.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/util/RuntimeProperties.java b/common/src/main/java/org/apache/falcon/util/RuntimeProperties.java
index 6ec2f70..87b67d0 100644
--- a/common/src/main/java/org/apache/falcon/util/RuntimeProperties.java
+++ b/common/src/main/java/org/apache/falcon/util/RuntimeProperties.java
@@ -19,7 +19,8 @@
 package org.apache.falcon.util;
 
 import org.apache.falcon.FalconException;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Properties;
 import java.util.concurrent.atomic.AtomicReference;
@@ -29,7 +30,7 @@ import java.util.concurrent.atomic.AtomicReference;
  */
 public final class RuntimeProperties extends ApplicationProperties {
 
-    private static final Logger LOG = Logger.getLogger(RuntimeProperties.class);
+    private static final Logger LOG = LoggerFactory.getLogger(RuntimeProperties.class);
 
     private static final String PROPERTY_FILE = "runtime.properties";
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/test/java/org/apache/falcon/entity/store/ConfigurationStoreTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/entity/store/ConfigurationStoreTest.java b/common/src/test/java/org/apache/falcon/entity/store/ConfigurationStoreTest.java
index f466711..288fdfa 100644
--- a/common/src/test/java/org/apache/falcon/entity/store/ConfigurationStoreTest.java
+++ b/common/src/test/java/org/apache/falcon/entity/store/ConfigurationStoreTest.java
@@ -27,7 +27,8 @@ import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.testng.Assert;
 import org.testng.annotations.AfterSuite;
 import org.testng.annotations.BeforeSuite;
@@ -40,7 +41,7 @@ import java.io.IOException;
  */
 public class ConfigurationStoreTest {
 
-    private static final Logger LOG = Logger.getLogger(ConfigurationStoreTest.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ConfigurationStoreTest.class);
 
     private ConfigurationStore store = ConfigurationStore.get();
     private TestListener listener = new TestListener();
@@ -121,6 +122,6 @@ public class ConfigurationStoreTest {
                 getProperty("config.store.uri"));
         FileSystem fs = FileSystem.get(path.toUri(), new Configuration());
         fs.delete(path, true);
-        LOG.info("Cleaned up " + path);
+        LOG.info("Cleaned up {}", path);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/feed/src/main/java/org/apache/falcon/workflow/OozieFeedWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/feed/src/main/java/org/apache/falcon/workflow/OozieFeedWorkflowBuilder.java b/feed/src/main/java/org/apache/falcon/workflow/OozieFeedWorkflowBuilder.java
index 8d5df88..16bff02 100644
--- a/feed/src/main/java/org/apache/falcon/workflow/OozieFeedWorkflowBuilder.java
+++ b/feed/src/main/java/org/apache/falcon/workflow/OozieFeedWorkflowBuilder.java
@@ -52,7 +52,8 @@ import org.apache.falcon.util.RuntimeProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -69,7 +70,7 @@ import java.util.Properties;
  * Workflow definition builder for feed replication & retention.
  */
 public class OozieFeedWorkflowBuilder extends OozieWorkflowBuilder<Feed> {
-    private static final Logger LOG = Logger.getLogger(OozieFeedWorkflowBuilder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(OozieFeedWorkflowBuilder.class);
 
     public OozieFeedWorkflowBuilder(Feed entity) {
         super(entity);
@@ -82,7 +83,7 @@ public class OozieFeedWorkflowBuilder extends OozieWorkflowBuilder<Feed> {
         for (String clusterName : clusters) {
             org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, clusterName);
             if (!feedCluster.getValidity().getStart().before(feedCluster.getValidity().getEnd())) {
-                LOG.info("feed validity start <= end for cluster " + clusterName + ". Skipping schedule");
+                LOG.info("feed validity start <= end for cluster {}. Skipping schedule", clusterName);
                 break;
             }
 
@@ -130,8 +131,8 @@ public class OozieFeedWorkflowBuilder extends OozieWorkflowBuilder<Feed> {
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
 
         if (feedCluster.getValidity().getEnd().before(new Date())) {
-            LOG.warn("Feed Retention is not applicable as Feed's end time for cluster " + cluster.getName()
-                + " is not in the future");
+            LOG.warn("Feed Retention is not applicable as Feed's end time for cluster {} is not in the future",
+                    cluster.getName());
             return null;
         }
 
@@ -380,8 +381,8 @@ public class OozieFeedWorkflowBuilder extends OozieWorkflowBuilder<Feed> {
 
             if (noOverlapExists(sourceStartDate, sourceEndDate,
                 targetStartDate, targetEndDate)) {
-                LOG.warn("Not creating replication coordinator, as the source cluster:" + srcCluster.getName()
-                    + "and target cluster: " + trgCluster.getName() + " do not have overlapping dates");
+                LOG.warn("Not creating replication coordinator, as the source cluster: {} and target cluster: {} do "
+                    + "not have overlapping dates", srcCluster.getName(), trgCluster.getName());
                 return null;
             }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
----------------------------------------------------------------------
diff --git a/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java b/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
index 1468ac4..8dea54c 100644
--- a/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
+++ b/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
@@ -21,7 +21,8 @@ package org.apache.falcon.listener;
 import org.apache.activemq.broker.BrokerService;
 import org.apache.falcon.JobTrackerService;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.ServletContextEvent;
 import javax.servlet.ServletContextListener;
@@ -30,7 +31,7 @@ import javax.servlet.ServletContextListener;
  * Listener for bootstrapping embedded hadoop cluster for integration tests.
  */
 public class HadoopStartupListener implements ServletContextListener {
-    private static final Logger LOG = Logger.getLogger(HadoopStartupListener.class);
+    private static final Logger LOG = LoggerFactory.getLogger(HadoopStartupListener.class);
     private BrokerService broker;
 
     @Override
@@ -109,7 +110,7 @@ public class HadoopStartupListener implements ServletContextListener {
             @Override
             public void run() {
                 try {
-                    LOG.info("Starting service " + service.getClass().getName());
+                    LOG.info("Starting service {}", service.getClass().getName());
                     invoke(service, method, null, null);
                 } catch(Exception e) {
                     throw new RuntimeException(e);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java
----------------------------------------------------------------------
diff --git a/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java b/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java
index d3c1a69..679e9ea 100644
--- a/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java
+++ b/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java
@@ -23,7 +23,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -41,7 +42,7 @@ import java.util.Map;
 public class EntityInstanceMessage {
 
     private final Map<ARG, String> keyValueMap = new LinkedHashMap<ARG, String>();
-    private static final Logger LOG = Logger.getLogger(EntityInstanceMessage.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EntityInstanceMessage.class);
     private static final String FALCON_ENTITY_TOPIC_NAME = "FALCON.ENTITY.TOPIC";
 
     /**
@@ -150,7 +151,7 @@ public class EntityInstanceMessage {
         try {
             feedPaths = getFeedPaths(cmd);
         } catch (IOException e) {
-            LOG.error("Error getting instance paths: ", e);
+            LOG.error("Error getting instance paths", e);
             throw new RuntimeException(e);
         }
 
@@ -197,13 +198,13 @@ public class EntityInstanceMessage {
         String operation = cmd.getOptionValue(ARG.operation.getArgName());
 
         if (topicName.equals(FALCON_ENTITY_TOPIC_NAME)) {
-            LOG.debug("Returning instance paths for Falcon Topic: "
-                    + cmd.getOptionValue(ARG.feedInstancePaths.getArgName()));
+            LOG.debug("Returning instance paths for Falcon Topic: {}",
+                    cmd.getOptionValue(ARG.feedInstancePaths.getArgName()));
             return new String[]{cmd.getOptionValue(ARG.feedInstancePaths.getArgName()), };
         }
 
         if (operation.equals(EntityOps.GENERATE.name()) || operation.equals(EntityOps.REPLICATE.name())) {
-            LOG.debug("Returning instance paths: " + cmd.getOptionValue(ARG.feedInstancePaths.getArgName()));
+            LOG.debug("Returning instance paths: {}", cmd.getOptionValue(ARG.feedInstancePaths.getArgName()));
             return cmd.getOptionValue(ARG.feedInstancePaths.getArgName()).split(",");
         }
         //else case of feed retention
@@ -220,12 +221,12 @@ public class EntityInstanceMessage {
         IOUtils.copyBytes(instance, writer, 4096, true);
         String[] instancePaths = writer.toString().split("=");
         fs.delete(logFile, true);
-        LOG.info("Deleted feed instance paths file:" + logFile);
+        LOG.info("Deleted feed instance paths file: {}", logFile);
         if (instancePaths.length == 1) {
             LOG.debug("Returning 0 instance paths for feed ");
             return new String[0];
         } else {
-            LOG.debug("Returning instance paths for feed " + instancePaths[1]);
+            LOG.debug("Returning instance paths for feed {}", instancePaths[1]);
             return instancePaths[1].split(",");
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java
----------------------------------------------------------------------
diff --git a/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java b/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java
index 2b9f573..ccac921 100644
--- a/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java
+++ b/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java
@@ -23,7 +23,8 @@ import org.apache.falcon.messaging.EntityInstanceMessage.ARG;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.jms.*;
 import java.lang.reflect.InvocationTargetException;
@@ -34,7 +35,7 @@ import java.lang.reflect.InvocationTargetException;
 public class MessageProducer extends Configured implements Tool {
 
     private Connection connection;
-    private static final Logger LOG = Logger.getLogger(MessageProducer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(MessageProducer.class);
     private static final long DEFAULT_TTL = 3 * 24 * 60 * 60 * 1000;
 
     /**
@@ -59,8 +60,7 @@ public class MessageProducer extends Configured implements Tool {
                     .getBrokerTTL());
             messageTTL = messageTTLinMins * 60 * 1000;
         } catch (NumberFormatException e) {
-            LOG.error("Error in parsing broker.ttl, setting TTL to:"
-                    + DEFAULT_TTL + " milli-seconds");
+            LOG.error("Error in parsing broker.ttl, setting TTL to: {} milli-seconds", DEFAULT_TTL);
         }
         producer.setTimeToLive(messageTTL);
         producer.send(new EntityInstanceMessageCreator(entityInstanceMessage)
@@ -159,20 +159,20 @@ public class MessageProducer extends Configured implements Tool {
             createAndStartConnection(cmd.getOptionValue(ARG.brokerImplClass.name()), "",
                     "", cmd.getOptionValue(ARG.brokerUrl.name()));
             for (EntityInstanceMessage message : entityInstanceMessage) {
-                LOG.info("Sending message:" + message.getKeyValueMap());
+                LOG.info("Sending message: {}", message.getKeyValueMap());
                 sendMessage(message);
             }
         } catch (JMSException e) {
-            LOG.error("Error in getConnection:", e);
+            LOG.error("Error in getConnection", e);
         } catch (Exception e) {
-            LOG.error("Error in getConnection:", e);
+            LOG.error("Error in getConnection", e);
         } finally {
             try {
                 if (connection != null) {
                     connection.close();
                 }
             } catch (JMSException e) {
-                LOG.error("Error in closing connection:", e);
+                LOG.error("Error in closing connection", e);
             }
         }
         return 0;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/metrics/pom.xml
----------------------------------------------------------------------
diff --git a/metrics/pom.xml b/metrics/pom.xml
index 2eb08ac..d4c9d03 100644
--- a/metrics/pom.xml
+++ b/metrics/pom.xml
@@ -51,8 +51,8 @@
         </dependency>
 
         <dependency>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
         </dependency>
     </dependencies>
 </project>


[4/4] git commit: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/incubator-falcon

Posted by sh...@apache.org.
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/incubator-falcon

Conflicts:
	common/src/main/java/org/apache/falcon/metadata/InstanceRelationshipGraphBuilder.java


Project: http://git-wip-us.apache.org/repos/asf/incubator-falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-falcon/commit/5afbf358
Tree: http://git-wip-us.apache.org/repos/asf/incubator-falcon/tree/5afbf358
Diff: http://git-wip-us.apache.org/repos/asf/incubator-falcon/diff/5afbf358

Branch: refs/heads/master
Commit: 5afbf3585be80df06d6a0d755d0fe2569ec50a0e
Parents: b036d74 97f89a1
Author: Shwetha GS <sh...@inmobi.com>
Authored: Wed May 21 12:50:44 2014 +0530
Committer: Shwetha GS <sh...@inmobi.com>
Committed: Wed May 21 12:50:44 2014 +0530

----------------------------------------------------------------------
 CHANGES.txt                                     |  3 +
 .../InstanceRelationshipGraphBuilder.java       | 72 +++++++++++---------
 .../metadata/MetadataMappingServiceTest.java    | 27 ++++----
 3 files changed, 57 insertions(+), 45 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/5afbf358/CHANGES.txt
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/5afbf358/common/src/main/java/org/apache/falcon/metadata/InstanceRelationshipGraphBuilder.java
----------------------------------------------------------------------


[2/4] FALCON-133 Upgrade to slf4j 1.7.5 and use SLF4J logger. Contributed by Jean-Baptiste Onofré

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/metrics/src/main/java/org/apache/falcon/aspect/AbstractFalconAspect.java
----------------------------------------------------------------------
diff --git a/metrics/src/main/java/org/apache/falcon/aspect/AbstractFalconAspect.java b/metrics/src/main/java/org/apache/falcon/aspect/AbstractFalconAspect.java
index 314e27b..29c77ce 100644
--- a/metrics/src/main/java/org/apache/falcon/aspect/AbstractFalconAspect.java
+++ b/metrics/src/main/java/org/apache/falcon/aspect/AbstractFalconAspect.java
@@ -19,10 +19,11 @@
 package org.apache.falcon.aspect;
 
 import org.apache.falcon.util.ResourcesReflectionUtil;
-import org.apache.log4j.Logger;
 import org.aspectj.lang.ProceedingJoinPoint;
 import org.aspectj.lang.annotation.Around;
 import org.aspectj.lang.annotation.Aspect;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.HashMap;
 import java.util.Map;
@@ -34,7 +35,7 @@ import java.util.Map;
 @Aspect
 public abstract class AbstractFalconAspect {
 
-    private static final Logger LOG = Logger.getLogger(AbstractFalconAspect.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractFalconAspect.class);
 
     @Around("@annotation(org.apache.falcon.monitors.Monitored)")
     public Object logAround(ProceedingJoinPoint joinPoint) throws Throwable {
@@ -74,8 +75,7 @@ public abstract class AbstractFalconAspect {
         Map<String, String> dimensions = new HashMap<String, String>();
 
         if (ResourcesReflectionUtil.getResourceDimensionsName(methodName) == null) {
-            LOG.warn("Class for method name: " + methodName
-                    + " is not added to ResourcesReflectionUtil");
+            LOG.warn("Class for method name: {} is not added to ResourcesReflectionUtil", methodName);
         } else {
             for (Map.Entry<Integer, String> param : ResourcesReflectionUtil
                     .getResourceDimensionsName(methodName).entrySet()) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/metrics/src/main/java/org/apache/falcon/plugin/LoggingPlugin.java
----------------------------------------------------------------------
diff --git a/metrics/src/main/java/org/apache/falcon/plugin/LoggingPlugin.java b/metrics/src/main/java/org/apache/falcon/plugin/LoggingPlugin.java
index 41a52bf..77d0d30 100644
--- a/metrics/src/main/java/org/apache/falcon/plugin/LoggingPlugin.java
+++ b/metrics/src/main/java/org/apache/falcon/plugin/LoggingPlugin.java
@@ -19,16 +19,17 @@
 package org.apache.falcon.plugin;
 
 import org.apache.falcon.aspect.ResourceMessage;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Plugin for logging metrics using log4j.
  */
 public class LoggingPlugin implements MonitoringPlugin {
-    private static final Logger METRIC = Logger.getLogger("METRIC");
+    private static final Logger METRIC = LoggerFactory.getLogger("METRIC");
 
     @Override
     public void monitor(ResourceMessage message) {
-        METRIC.info(message);
+        METRIC.info("{}", message);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie-el-extensions/pom.xml
----------------------------------------------------------------------
diff --git a/oozie-el-extensions/pom.xml b/oozie-el-extensions/pom.xml
index 0c8d135..8f7695e 100644
--- a/oozie-el-extensions/pom.xml
+++ b/oozie-el-extensions/pom.xml
@@ -62,8 +62,13 @@
         </dependency>
 
         <dependency>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <scope>test</scope>
         </dependency>
 
         <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/logging/DefaultTaskLogRetriever.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/logging/DefaultTaskLogRetriever.java b/oozie/src/main/java/org/apache/falcon/logging/DefaultTaskLogRetriever.java
index ae4dd12..a685faf 100644
--- a/oozie/src/main/java/org/apache/falcon/logging/DefaultTaskLogRetriever.java
+++ b/oozie/src/main/java/org/apache/falcon/logging/DefaultTaskLogRetriever.java
@@ -24,7 +24,8 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 
@@ -33,7 +34,7 @@ import java.io.IOException;
  */
 public class DefaultTaskLogRetriever extends Configured implements TaskLogURLRetriever {
 
-    private static final Logger LOG = Logger.getLogger(DefaultTaskLogRetriever.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DefaultTaskLogRetriever.class);
 
     @Override
     public String retrieveTaskLogURL(String jobId) throws IOException {
@@ -42,7 +43,7 @@ public class DefaultTaskLogRetriever extends Configured implements TaskLogURLRet
 
         RunningJob job = jobClient.getJob(JobID.forName(jobId));
         if (job == null) {
-            LOG.warn("No running job for job id: " + jobId);
+            LOG.warn("No running job for job id: {}", jobId);
             return getFromHistory(jobId);
         }
         TaskCompletionEvent[] tasks = job.getTaskCompletionEvents(0);
@@ -51,7 +52,7 @@ public class DefaultTaskLogRetriever extends Configured implements TaskLogURLRet
             return tasks[1].getTaskTrackerHttp() + "/tasklog?attemptid="
                     + tasks[1].getTaskAttemptId() + "&all=true";
         } else {
-            LOG.warn("No running task for job: " + jobId);
+            LOG.warn("No running task for job: {}", jobId);
             return getFromHistory(jobId);
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/logging/LogMover.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/logging/LogMover.java b/oozie/src/main/java/org/apache/falcon/logging/LogMover.java
index 117aa58..3922b38 100644
--- a/oozie/src/main/java/org/apache/falcon/logging/LogMover.java
+++ b/oozie/src/main/java/org/apache/falcon/logging/LogMover.java
@@ -31,11 +31,12 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.OozieClient;
 import org.apache.oozie.client.OozieClientException;
 import org.apache.oozie.client.WorkflowAction;
 import org.apache.oozie.client.WorkflowJob;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
@@ -53,7 +54,7 @@ import java.util.Set;
  */
 public class LogMover extends Configured implements Tool {
 
-    private static final Logger LOG = Logger.getLogger(LogMover.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LogMover.class);
     public static final Set<String> FALCON_ACTIONS =
         new HashSet<String>(Arrays.asList(new String[]{"eviction", "replication", }));
 
@@ -84,7 +85,7 @@ public class LogMover extends Configured implements Tool {
             try {
                 jobInfo = client.getJobInfo(args.subflowId);
             } catch (OozieClientException e) {
-                LOG.error("Error getting jobinfo for: " + args.subflowId, e);
+                LOG.error("Error getting jobinfo for: {}", args.subflowId, e);
                 return 0;
             }
 
@@ -115,14 +116,13 @@ public class LogMover extends Configured implements Tool {
                             || action.getType().equals("java")) {
                         copyTTlogs(fs, path, action);
                     } else {
-                        LOG.info("Ignoring hadoop TT log for non-pig and non-java action:"
-                                + action.getName());
+                        LOG.info("Ignoring hadoop TT log for non-pig and non-java action: {}", action.getName());
                     }
                 }
             }
 
         } catch (Exception e) {
-            LOG.error("Exception in log mover:", e);
+            LOG.error("Exception in log mover", e);
         }
         return 0;
     }
@@ -137,20 +137,19 @@ public class LogMover extends Configured implements Tool {
         InputStream in = new ByteArrayInputStream(client.getJobLog(id).getBytes());
         OutputStream out = fs.create(new Path(path, "oozie.log"));
         IOUtils.copyBytes(in, out, 4096, true);
-        LOG.info("Copied oozie log to " + path);
+        LOG.info("Copied oozie log to {}", path);
     }
 
     private void copyTTlogs(FileSystem fs, Path path,
                             WorkflowAction action) throws Exception {
         String ttLogURL = getTTlogURL(action.getExternalId());
         if (ttLogURL != null) {
-            LOG.info("Fetching log for action: " + action.getExternalId()
-                    + " from url: " + ttLogURL);
+            LOG.info("Fetching log for action: {} from url: {}", action.getExternalId(), ttLogURL);
             InputStream in = getURLinputStream(new URL(ttLogURL));
             OutputStream out = fs.create(new Path(path, action.getName() + "_"
                     + getMappedStatus(action.getStatus()) + ".log"));
             IOUtils.copyBytes(in, out, 4096, true);
-            LOG.info("Copied log to " + path);
+            LOG.info("Copied log to {}", path);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java b/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java
index 0c338da..4ed8f52 100644
--- a/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java
+++ b/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java
@@ -32,9 +32,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.OozieClientException;
 import org.mortbay.log.Log;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 
@@ -42,7 +43,7 @@ import java.io.IOException;
  * Get oozie action execution logs corresponding to a run as saved by the log mover.
  */
 public final class LogProvider {
-    private static final Logger LOG = Logger.getLogger(LogProvider.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LogProvider.class);
 
     public Instance populateLogUrls(Entity entity, Instance instance,
                                     String runId) throws FalconException {
@@ -86,7 +87,7 @@ public final class LogProvider {
                 // order of runs
                 return runs[runs.length - 1].getPath().getName();
             } else {
-                LOG.warn("No run dirs are available in logs dir:" + jobPath);
+                LOG.warn("No run dirs are available in logs dir: {}", jobPath);
                 return "-";
             }
         } else {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java b/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java
index 11dc1e4..c61e089 100644
--- a/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java
+++ b/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java
@@ -32,7 +32,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
@@ -42,7 +43,7 @@ import java.util.Properties;
  * Host shared libraries in oozie shared lib dir upon creation or modification of cluster.
  */
 public class SharedLibraryHostingService implements ConfigurationChangeListener {
-    private static final Logger LOG = Logger.getLogger(SharedLibraryHostingService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(SharedLibraryHostingService.class);
 
     private static final String[] LIBS = StartupProperties.get().getProperty("shared.libs").split(",");
 
@@ -84,7 +85,7 @@ public class SharedLibraryHostingService implements ConfigurationChangeListener
             pushLibsToHDFS(properties.getProperty("libext.process.paths"),
                     new Path(libext, EntityType.PROCESS.name()) , cluster, null);
         } catch (IOException e) {
-            LOG.error("Failed to copy shared libs to cluster " + cluster.getName(), e);
+            LOG.error("Failed to copy shared libs to cluster {}", cluster.getName(), e);
         }
     }
 
@@ -94,7 +95,7 @@ public class SharedLibraryHostingService implements ConfigurationChangeListener
             return;
         }
 
-        LOG.debug("Copying libs from " + src);
+        LOG.debug("Copying libs from {}", src);
         FileSystem fs;
         try {
             fs = getFileSystem(cluster);
@@ -132,7 +133,7 @@ public class SharedLibraryHostingService implements ConfigurationChangeListener
                     }
                 }
                 fs.copyFromLocalFile(false, true, new Path(file.getAbsolutePath()), targetFile);
-                LOG.info("Copied " + file.getAbsolutePath() + " to " + targetFile.toString() + " in " + fs.getUri());
+                LOG.info("Copied {} to {} in {}", file.getAbsolutePath(), targetFile.toString(), fs.getUri());
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java b/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java
index fc4eabd..d3befa2 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java
@@ -28,7 +28,8 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -37,7 +38,7 @@ import java.util.List;
  * Utility called by oozie workflow engine post workflow execution in parent workflow.
  */
 public class FalconPostProcessing extends Configured implements Tool {
-    private static final Logger LOG = Logger.getLogger(FalconPostProcessing.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FalconPostProcessing.class);
 
     /**
      * Args that the utility understands.
@@ -101,19 +102,19 @@ public class FalconPostProcessing extends Configured implements Tool {
 
         CommandLine cmd = getCommand(args);
 
-        LOG.info("Sending user message " + cmd);
+        LOG.info("Sending user message {}", cmd);
         invokeUserMessageProducer(cmd);
 
         if ("SUCCEEDED".equals(Arg.STATUS.getOptionValue(cmd))) {
-            LOG.info("Recording lineage for " + cmd);
+            LOG.info("Recording lineage for {}", cmd);
             recordLineageMetadata(cmd);
         }
 
         //LogMover doesn't throw exception, a failed log mover will not fail the user workflow
-        LOG.info("Moving logs " + cmd);
+        LOG.info("Moving logs {}", cmd);
         invokeLogProducer(cmd);
 
-        LOG.info("Sending falcon message " + cmd);
+        LOG.info("Sending falcon message {}", cmd);
         invokeFalconMessageProducer(cmd);
 
         return 0;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/workflow/OozieWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/OozieWorkflowBuilder.java b/oozie/src/main/java/org/apache/falcon/workflow/OozieWorkflowBuilder.java
index 96b5a64..7616df1 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/OozieWorkflowBuilder.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/OozieWorkflowBuilder.java
@@ -59,8 +59,9 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.OozieClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBElement;
@@ -88,7 +89,7 @@ import java.util.Set;
  */
 public abstract class OozieWorkflowBuilder<T extends Entity> extends WorkflowBuilder<T> {
 
-    private static final Logger LOG = Logger.getLogger(OozieWorkflowBuilder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(OozieWorkflowBuilder.class);
     protected static final ConfigurationStore CONFIG_STORE = ConfigurationStore.get();
 
     protected static final String NOMINAL_TIME_EL = "${coord:formatTime(coord:nominalTime(), 'yyyy-MM-dd-HH-mm')}";
@@ -272,7 +273,7 @@ public abstract class OozieWorkflowBuilder<T extends Entity> extends WorkflowBui
                 props.put("shouldRecord", "true");
             }
         } catch (FalconException e) {
-            LOG.error("Unable to get Late Process for entity:" + entity, e);
+            LOG.error("Unable to get Late Process for entity: {}", entity, e);
             throw new FalconRuntimException(e);
         }
         props.put("entityName", entity.getName());
@@ -316,11 +317,11 @@ public abstract class OozieWorkflowBuilder<T extends Entity> extends WorkflowBui
             if (LOG.isDebugEnabled()) {
                 StringWriter writer = new StringWriter();
                 marshaller.marshal(jaxbElement, writer);
-                LOG.debug("Writing definition to " + outPath + " on cluster " + cluster.getName());
-                LOG.debug(writer.getBuffer());
+                LOG.debug("Writing definition to {} on cluster {}", outPath, cluster.getName());
+                LOG.debug(writer.getBuffer().toString());
             }
 
-            LOG.info("Marshalled " + jaxbElement.getDeclaredType() + " to " + outPath);
+            LOG.info("Marshalled {} to {}", jaxbElement.getDeclaredType(), outPath);
         } catch (Exception e) {
             throw new FalconException("Unable to marshall app object", e);
         }
@@ -591,7 +592,7 @@ public abstract class OozieWorkflowBuilder<T extends Entity> extends WorkflowBui
             propagateHiveCredentials(cluster, properties);
         }
 
-        LOG.info("Cluster: " + cluster.getName() + ", PROPS: " + properties);
+        LOG.info("Cluster: {}, PROPS: {}", cluster.getName(), properties);
         return properties;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java
index d6dd2af..d598097 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java
@@ -23,8 +23,9 @@ import org.apache.falcon.entity.ClusterHelper;
 import org.apache.falcon.entity.store.ConfigurationStore;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.ProxyOozieClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.concurrent.ConcurrentHashMap;
 
@@ -33,7 +34,7 @@ import java.util.concurrent.ConcurrentHashMap;
  */
 public final class OozieClientFactory {
 
-    private static final Logger LOG = Logger.getLogger(OozieClientFactory.class);
+    private static final Logger LOG = LoggerFactory.getLogger(OozieClientFactory.class);
     private static final String LOCAL_OOZIE = "local";
 
     private static final ConcurrentHashMap<String, ProxyOozieClient> CACHE =
@@ -49,7 +50,7 @@ public final class OozieClientFactory {
         String oozieUrl = ClusterHelper.getOozieUrl(cluster);
         if (!CACHE.containsKey(oozieUrl)) {
             ProxyOozieClient ref = getClientRef(oozieUrl);
-            LOG.info("Caching Oozie client object for " + oozieUrl);
+            LOG.info("Caching Oozie client object for {}", oozieUrl);
             CACHE.putIfAbsent(oozieUrl, ref);
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java
index a930bb7..bbed949 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java
@@ -28,14 +28,15 @@ import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Service that cleans up artifacts that falcon dropped on hdfs for oozie's use.
  */
 public class OozieHouseKeepingService implements WorkflowEngineActionListener {
 
-    private static final Logger LOG = Logger.getLogger(OozieHouseKeepingService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(OozieHouseKeepingService.class);
 
     @Override
     public void beforeSchedule(Entity entity, String cluster) throws FalconException {
@@ -54,7 +55,7 @@ public class OozieHouseKeepingService implements WorkflowEngineActionListener {
         try {
             Cluster cluster = EntityUtil.getEntity(EntityType.CLUSTER, clusterName);
             Path entityPath = EntityUtil.getBaseStagingPath(cluster, entity);
-            LOG.info("Deleting entity path " + entityPath + " on cluster " + clusterName);
+            LOG.info("Deleting entity path {} on cluster {}", entityPath, clusterName);
 
             Configuration conf = ClusterHelper.getConfiguration(cluster);
             FileSystem fs = HadoopClientFactory.get().createFileSystem(conf);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
index 9d4103b..57fca0f 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
@@ -40,11 +40,12 @@ import org.apache.falcon.workflow.OozieWorkflowBuilder;
 import org.apache.falcon.workflow.WorkflowBuilder;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.*;
 import org.apache.oozie.client.CoordinatorJob.Timeunit;
 import org.apache.oozie.client.Job.Status;
 import org.apache.oozie.client.rest.RestConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.*;
@@ -55,7 +56,7 @@ import java.util.Map.Entry;
  */
 public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
-    private static final Logger LOG = Logger.getLogger(OozieWorkflowEngine.class);
+    private static final Logger LOG = LoggerFactory.getLogger(OozieWorkflowEngine.class);
 
     public static final String ENGINE = "oozie";
     private static final BundleJob MISSING = new NullBundleJob();
@@ -110,7 +111,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
             if (bundleJob == MISSING) {
                 schedClusters.add(cluster);
             } else {
-                LOG.debug("The entity " + entity.getName() + " is already scheduled on cluster " + cluster);
+                LOG.debug("The entity {} is already scheduled on cluster {}", entity.getName(), cluster);
             }
         }
 
@@ -120,7 +121,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                 String[schedClusters.size()]));
             for (Map.Entry<String, Properties> entry : newFlows.entrySet()) {
                 String cluster = entry.getKey();
-                LOG.info("Scheduling " + entity.toShortString() + " on cluster " + cluster);
+                LOG.info("Scheduling {} on cluster {}", entity.toShortString(), cluster);
                 scheduleEntity(cluster, entry.getValue(), entity);
                 commitStagingPath(cluster, entry.getValue().getProperty(OozieClient.BUNDLE_APP_PATH));
             }
@@ -196,7 +197,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                     //Filtering bundles that correspond to deleted entities(endtime is set when an entity is deleted)
                     if (job.getEndTime() == null) {
                         filteredJobs.add(job);
-                        LOG.debug("Found bundle " + job.getId());
+                        LOG.debug("Found bundle {}", job.getId());
                     }
                 }
                 return filteredJobs;
@@ -278,7 +279,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
         List<BundleJob> jobs = findBundles(entity, cluster);
         if (jobs.isEmpty()) {
-            LOG.warn("No active job found for " + entity.getName());
+            LOG.warn("No active job found for {}", entity.getName());
             return "FAILED";
         }
 
@@ -320,16 +321,16 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
             //kill all coords
             for (CoordinatorJob coord : job.getCoordinators()) {
                 client.kill(coord.getId());
-                LOG.debug("Killed coord " + coord.getId() + " on cluster " + clusterName);
+                LOG.debug("Killed coord {} on cluster {}", coord.getId(), clusterName);
             }
 
             //set end time of bundle
             client.change(job.getId(), OozieClient.CHANGE_VALUE_ENDTIME + "=" + SchemaHelper.formatDateUTC(new Date()));
-            LOG.debug("Changed end time of bundle " + job.getId() + " on cluster " + clusterName);
+            LOG.debug("Changed end time of bundle {} on cluster {}", job.getId(), clusterName);
 
             //kill bundle
             client.kill(job.getId());
-            LOG.debug("Killed bundle " + job.getId() + " on cluster " + clusterName);
+            LOG.debug("Killed bundle {} on cluster {}", job.getId(), clusterName);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -494,7 +495,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                 try {
                     status = performAction(cluster, action, coordinatorAction, props);
                 } catch (FalconException e) {
-                    LOG.warn("Unable to perform action " + action + " on cluster ", e);
+                    LOG.warn("Unable to perform action {} on cluster", action, e);
                     status = WorkflowStatus.ERROR.name();
                     overallStatus = APIResult.Status.PARTIAL;
                 }
@@ -571,7 +572,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                     coordJob = client.getCoordJobInfo(coord.getId(), null, startActionNumber,
                         (lastMaterializedActionNumber - startActionNumber));
                 } catch (OozieClientException e) {
-                    LOG.debug("Unable to get details for coordinator " + coord.getId() + " " + e.getMessage());
+                    LOG.debug("Unable to get details for coordinator {}", coord.getId(), e);
                     throw new FalconException(e);
                 }
 
@@ -675,7 +676,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                 org.apache.oozie.client.CoordinatorAction.Status.RUNNING,
                 org.apache.oozie.client.CoordinatorAction.Status.WAITING,
                 org.apache.oozie.client.CoordinatorAction.Status.READY);
-            LOG.info("Rerun job " + coordinatorAction.getId() + " on cluster " + cluster);
+            LOG.info("Rerun job {} on cluster {}", coordinatorAction.getId(), cluster);
         } catch (Exception e) {
             LOG.error("Unable to rerun workflows", e);
             throw new FalconException(e);
@@ -773,7 +774,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                             actions.add(coordActionInfo);
                         }
                     } catch (OozieClientException e) {
-                        LOG.debug("Unable to get action for " + actionId + " " + e.getMessage());
+                        LOG.debug("Unable to get action for {}", actionId, e);
                     }
 
                     Calendar startCal = Calendar.getInstance(EntityUtil.getTimeZone(coord.getTimeZone()));
@@ -860,7 +861,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         boolean wfUpdated = UpdateHelper.isWorkflowUpdated(cluster, newEntity);
 
         if (!entityUpdated && !wfUpdated) {
-            LOG.debug("Nothing to update for cluster " + cluster);
+            LOG.debug("Nothing to update for cluster {}", cluster);
             return null;
         }
 
@@ -869,27 +870,28 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         if (stagingPath != null) {  //update if entity is scheduled
             BundleJob bundle = findBundleForStagingPath(cluster, oldEntity, stagingPath);
             bundle = getBundleInfo(cluster, bundle.getId());
-            LOG.info("Updating entity through Workflow Engine" + newEntity.toShortString());
+            LOG.info("Updating entity through Workflow Engine {}", newEntity.toShortString());
             Date newEndTime = EntityUtil.getEndTime(newEntity, cluster);
             if (newEndTime.before(now())) {
                 throw new FalconException("New end time for " + newEntity.getName() + " is past current time. Entity "
                     + "can't be updated. Use remove and add");
             }
 
-            LOG.debug("Updating for cluster : " + cluster + ", bundle: " + bundle.getId());
+            LOG.debug("Updating for cluster: {}, bundle: {}", cluster, bundle.getId());
 
             if (canUpdateBundle(oldEntity, newEntity, wfUpdated)) {
                 // only concurrency and endtime are changed. So, change coords
-                LOG.info("Change operation is adequate! : " + cluster + ", bundle: " + bundle.getId());
+                LOG.info("Change operation is adequate! : {}, bundle: {}", cluster, bundle.getId());
                 updateCoords(cluster, bundle, EntityUtil.getParallel(newEntity),
                     EntityUtil.getEndTime(newEntity, cluster));
                 return newEndTime;
             }
 
-            LOG.debug("Going to update ! : " + newEntity.toShortString() + "for cluster " + cluster + ", "
-                + "bundle: " + bundle.getId());
+            LOG.debug("Going to update! : {} for cluster {}, bundle: {}",
+                    newEntity.toShortString(), cluster, bundle.getId());
             effectiveTime = updateInternal(oldEntity, newEntity, cluster, bundle, false, effectiveTime);
-            LOG.info("Entity update complete : " + newEntity.toShortString() + cluster + ", bundle: " + bundle.getId());
+            LOG.info("Entity update complete: {} for cluster {}, bundle: {}",
+                    newEntity.toShortString(), cluster, bundle.getId());
         }
 
         //Update affected entities
@@ -899,7 +901,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                 continue;
             }
 
-            LOG.info("Dependent entities need to be updated " + affectedEntity.toShortString());
+            LOG.info("Dependent entities need to be updated {}", affectedEntity.toShortString());
             if (!UpdateHelper.shouldUpdate(oldEntity, newEntity, affectedEntity, cluster)) {
                 continue;
             }
@@ -909,17 +911,17 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                 continue;
             }
 
-            LOG.info("Triggering update for " + cluster + ", " + affectedProcBundle.getId());
+            LOG.info("Triggering update for {}, {}", cluster, affectedProcBundle.getId());
 
             Date depEndTime =
                 updateInternal(affectedEntity, affectedEntity, cluster, affectedProcBundle, false, effectiveTime);
             if (effectiveTime == null || effectiveTime.after(depEndTime)) {
                 effectiveTime = depEndTime;
             }
-            LOG.info("Entity update complete : " + affectedEntity.toShortString() + cluster + ", "+ "bundle: "
-                + affectedProcBundle.getId());
+            LOG.info("Entity update complete: {} for cluster {}, bundle: {}",
+                    affectedEntity.toShortString(), cluster, affectedProcBundle.getId());
         }
-        LOG.info("Entity update and all dependent entities updated: " + oldEntity.toShortString());
+        LOG.info("Entity update and all dependent entities updated: {}", oldEntity.toShortString());
         return effectiveTime;
     }
 
@@ -964,26 +966,26 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
         // change coords
         for (CoordinatorJob coord : bundle.getCoordinators()) {
-            LOG.debug("Updating endtime of coord " + coord.getId() + " to " + SchemaHelper.formatDateUTC(endTime)
-                + " on cluster " + cluster);
+            LOG.debug("Updating endtime of coord {} to {} on cluster {}",
+                    coord.getId(), SchemaHelper.formatDateUTC(endTime), cluster);
             Date lastActionTime = getCoordLastActionTime(coord);
             if (lastActionTime == null) { // nothing is materialized
-                LOG.info("Nothing is materialized for this coord: " + coord.getId());
+                LOG.info("Nothing is materialized for this coord: {}", coord.getId());
                 if (endTime.compareTo(coord.getStartTime()) <= 0) {
-                    LOG.info("Setting end time to START TIME " + SchemaHelper.formatDateUTC(coord.getStartTime()));
+                    LOG.info("Setting end time to START TIME {}", SchemaHelper.formatDateUTC(coord.getStartTime()));
                     change(cluster, coord.getId(), concurrency, coord.getStartTime(), null);
                 } else {
-                    LOG.info("Setting end time to START TIME " + SchemaHelper.formatDateUTC(endTime));
+                    LOG.info("Setting end time to START TIME {}", SchemaHelper.formatDateUTC(endTime));
                     change(cluster, coord.getId(), concurrency, endTime, null);
                 }
             } else {
-                LOG.info("Actions have materialized for this coord: " + coord.getId() + ", last action "
-                    + SchemaHelper.formatDateUTC(lastActionTime));
+                LOG.info("Actions have materialized for this coord: {}, last action {}",
+                        coord.getId(), SchemaHelper.formatDateUTC(lastActionTime));
                 if (!endTime.after(lastActionTime)) {
                     Date pauseTime = offsetTime(endTime, -1);
                     // set pause time which deletes future actions
-                    LOG.info("Setting pause time on coord : " + coord.getId() + " to " + SchemaHelper.formatDateUTC(
-                        pauseTime));
+                    LOG.info("Setting pause time on coord: {} to {}",
+                            coord.getId(), SchemaHelper.formatDateUTC(pauseTime));
                     change(cluster, coord.getId(), concurrency, null, SchemaHelper.formatDateUTC(pauseTime));
                 }
                 change(cluster, coord.getId(), concurrency, endTime, "");
@@ -1037,16 +1039,16 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
             //newBundleId and latestBundle will be null if effectiveTime = process end time
             if (newBundleId != null) {
                 latestBundle = getBundleInfo(cluster, newBundleId);
-                LOG.info("New bundle " + newBundleId + " scheduled successfully with start time "
-                    + SchemaHelper.formatDateUTC(effectiveTime));
+                LOG.info("New bundle {} scheduled successfully with start time {}",
+                        newBundleId, SchemaHelper.formatDateUTC(effectiveTime));
             }
         } else {
-            LOG.info("New bundle has already been created. Bundle Id: " + latestBundle.getId() + ", Start: "
-                + SchemaHelper.formatDateUTC(latestBundle.getStartTime()) + ", End: " + latestBundle.getEndTime());
+            LOG.info("New bundle has already been created. Bundle Id: {}, Start: {}, End: {}", latestBundle.getId(),
+                SchemaHelper.formatDateUTC(latestBundle.getStartTime()), latestBundle.getEndTime());
 
             //pick effectiveTime from already created bundle
             effectiveTime = getMinStartTime(latestBundle);
-            LOG.info("Will set old coord end time to " + SchemaHelper.formatDateUTC(effectiveTime));
+            LOG.info("Will set old coord end time to {}", SchemaHelper.formatDateUTC(effectiveTime));
         }
         if (effectiveTime != null) {
             //set endtime for old coords
@@ -1072,11 +1074,11 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         EntityUtil.setStartDate(entity, cluster, startDate);
         WorkflowBuilder<Entity> builder = WorkflowBuilder.getBuilder(ENGINE, clone);
         Map<String, Properties> bundleProps = builder.newWorkflowSchedule(cluster);
-        LOG.info("Scheduling " + entity.toShortString() + " on cluster " + cluster + " with props " + bundleProps);
+        LOG.info("Scheduling {} on cluster {} with props {}", entity.toShortString(), cluster, bundleProps);
         if (bundleProps != null && bundleProps.size() > 0) {
             return scheduleEntity(cluster, bundleProps.get(cluster), entity);
         } else {
-            LOG.info("No new workflow to be scheduled for this " + entity.toShortString());
+            LOG.info("No new workflow to be scheduled for this {}", entity.toShortString());
             return null;
         }
     }
@@ -1133,7 +1135,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
             jobprops.remove(OozieClient.BUNDLE_APP_PATH);
             client.reRun(jobId, jobprops);
             assertStatus(cluster, jobId, Job.Status.RUNNING);
-            LOG.info("Rerun job " + jobId + " on cluster " + cluster);
+            LOG.info("Rerun job {} on cluster {}", jobId, cluster);
         } catch (Exception e) {
             LOG.error("Unable to rerun workflows", e);
             throw new FalconException(e);
@@ -1204,7 +1206,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     private String run(String cluster, Properties props) throws FalconException {
         try {
             String jobId = OozieClientFactory.get(cluster).run(props);
-            LOG.info("Submitted " + jobId + " on cluster " + cluster + " with properties : " + props);
+            LOG.info("Submitted {} on cluster {} with properties: {}", jobId, cluster, props);
             return jobId;
         } catch (OozieClientException e) {
             LOG.error("Unable to schedule workflows", e);
@@ -1217,7 +1219,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
             OozieClientFactory.get(cluster).suspend(jobId);
             assertStatus(cluster, jobId, Status.PREPSUSPENDED, Status.SUSPENDED, Status.SUCCEEDED, Status.FAILED,
                 Status.KILLED);
-            LOG.info("Suspended job " + jobId + " on cluster " + cluster);
+            LOG.info("Suspended job {} on cluster {}", jobId, cluster);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -1227,7 +1229,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         try {
             OozieClientFactory.get(cluster).resume(jobId);
             assertStatus(cluster, jobId, Status.PREP, Status.RUNNING, Status.SUCCEEDED, Status.FAILED, Status.KILLED);
-            LOG.info("Resumed job " + jobId + " on cluster " + cluster);
+            LOG.info("Resumed job {} on cluster {}", jobId, cluster);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -1237,7 +1239,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         try {
             OozieClientFactory.get(cluster).kill(jobId);
             assertStatus(cluster, jobId, Status.KILLED, Status.SUCCEEDED, Status.FAILED);
-            LOG.info("Killed job " + jobId + " on cluster " + cluster);
+            LOG.info("Killed job {} on cluster {}", jobId, cluster);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -1246,7 +1248,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     private void change(String cluster, String jobId, String changeValue) throws FalconException {
         try {
             OozieClientFactory.get(cluster).change(jobId, changeValue);
-            LOG.info("Changed bundle/coord " + jobId + ": " + changeValue + " on cluster " + cluster);
+            LOG.info("Changed bundle/coord {}: {} on cluster {}", jobId, changeValue, cluster);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -1289,9 +1291,9 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                 }
                 coord = client.getCoordJobInfo(id);
             }
-            LOG.error("Failed to change coordinator. Current value " + coord.getConcurrency() + ", "
-                + SchemaHelper.formatDateUTC(coord.getEndTime()) + ", " + SchemaHelper.formatDateUTC(
-                    coord.getPauseTime()));
+            LOG.error("Failed to change coordinator. Current value {}, {}, {}",
+                    coord.getConcurrency(), SchemaHelper.formatDateUTC(coord.getEndTime()),
+                    SchemaHelper.formatDateUTC(coord.getPauseTime()));
             throw new FalconException("Failed to change coordinator " + id + " with change value " + changeValueStr);
         } catch (OozieClientException e) {
             throw new FalconException(e);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java b/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java
index c78a83a..3d0e903 100644
--- a/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java
+++ b/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java
@@ -24,10 +24,11 @@ import org.apache.falcon.util.RuntimeProperties;
 import org.apache.hadoop.hdfs.web.KerberosUgiAuthenticator;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.Authenticator;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.rest.RestConstants;
 import org.json.simple.JSONObject;
 import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.io.InputStreamReader;
@@ -47,7 +48,7 @@ import java.util.concurrent.Callable;
  */
 public class ProxyOozieClient extends AuthOozieClient {
 
-    private static final Logger LOG = Logger.getLogger(ProxyOozieClient.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ProxyOozieClient.class);
     private static final Map<String, String> NONE = new HashMap<String, String>();
 
     private final Authenticator authenticator = new KerberosUgiAuthenticator();
@@ -78,9 +79,7 @@ public class ProxyOozieClient extends AuthOozieClient {
         throws IOException, OozieClientException {
 
         final URL decoratedUrl = decorateUrlWithUser(url);
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("ProxyOozieClient.createConnection: u=" + url + ", m=" + method);
-        }
+        LOG.debug("ProxyOozieClient.createConnection: u={}, m={}", url, method);
 
         UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
         try {
@@ -115,9 +114,7 @@ public class ProxyOozieClient extends AuthOozieClient {
             // strUrl += "&" + RestConstants.DO_AS_PARAM + "=" + CurrentUser.getUser();
 
             url = new URL(strUrl);
-            if (LOG.isDebugEnabled()) {
-                LOG.debug("Decorated url with user info: " + url);
-            }
+            LOG.debug("Decorated url with user info: {}", url);
         }
 
         return url;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java
----------------------------------------------------------------------
diff --git a/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java b/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java
index 5a2e570..881b0c0 100644
--- a/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java
+++ b/oozie/src/versioned-src/v1/java/org/apache/falcon/logging/v1/TaskLogRetrieverV1.java
@@ -26,18 +26,18 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.DefaultJobHistoryParser;
 import org.apache.hadoop.mapred.JobHistory;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.net.URLDecoder;
 
 /**
  * Hadoop v1 task log retriever based on job history
  */
 public final class TaskLogRetrieverV1 extends DefaultTaskLogRetriever {
-    private static final Logger LOG = Logger.getLogger(TaskLogRetrieverV1.class);
+    private static final Logger LOG = LoggerFactory.getLogger(TaskLogRetrieverV1.class);
 
     @Override
     public String getFromHistory(String jobId) throws IOException {
@@ -46,8 +46,8 @@ public final class TaskLogRetrieverV1 extends DefaultTaskLogRetriever {
         if (file == null) return null;
         JobHistory.JobInfo jobInfo = new JobHistory.JobInfo(jobId);
         DefaultJobHistoryParser.parseJobTasks(file, jobInfo, new Path(file).getFileSystem(conf));
-        LOG.info("History file:" + file);
-        LOG.debug("Number of tasks in the history file: " + jobInfo.getAllTasks().size());
+        LOG.info("History file: {}", file);
+        LOG.debug("Number of tasks in the history file: {}", jobInfo.getAllTasks().size());
         for (JobHistory.Task task : jobInfo.getAllTasks().values()) {
             if (task.get(JobHistory.Keys.TASK_TYPE).equals(JobHistory.Values.MAP.name()) &&
                     task.get(JobHistory.Keys.TASK_STATUS).equals(JobHistory.Values.SUCCESS.name())) {
@@ -80,7 +80,7 @@ public final class TaskLogRetrieverV1 extends DefaultTaskLogRetriever {
                 file = file.substring(file.lastIndexOf('=') + 1);
                 file = JobHistory.JobInfo.decodeJobHistoryFileName(file);
             } else {
-                LOG.warn("JobURL " + jobUrl + " for id: " + jobId + " returned " + status);
+                LOG.warn("JobURL {} for id: {} returned {}", jobUrl, jobId, status);
             }
             return file;
         } catch (URISyntaxException e) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5a5716e..3550b7b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -102,7 +102,7 @@
 
         <include.prism>true</include.prism>
 
-        <slf4j.version>1.6.1</slf4j.version>
+        <slf4j.version>1.7.5</slf4j.version>
         <oozie.version>4.0.1</oozie.version>
         <oozie.buildversion>${oozie.version}-falcon</oozie.buildversion>
         <oozie.forcebuild>false</oozie.forcebuild>
@@ -511,12 +511,6 @@
             </dependency>
 
             <dependency>
-                <groupId>org.slf4j</groupId>
-                <artifactId>slf4j-simple</artifactId>
-                <version>${slf4j.version}</version>
-            </dependency>
-
-            <dependency>
                 <groupId>commons-lang</groupId>
                 <artifactId>commons-lang</artifactId>
                 <version>2.6</version>
@@ -525,7 +519,7 @@
             <dependency>
                 <groupId>log4j</groupId>
                 <artifactId>log4j</artifactId>
-                <version>1.2.15</version>
+                <version>1.2.17</version>
                 <scope>compile</scope>
                 <exclusions>
                     <exclusion>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/FalconWebException.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/FalconWebException.java b/prism/src/main/java/org/apache/falcon/FalconWebException.java
index 65cf7d2..d552c07 100644
--- a/prism/src/main/java/org/apache/falcon/FalconWebException.java
+++ b/prism/src/main/java/org/apache/falcon/FalconWebException.java
@@ -22,7 +22,8 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.falcon.resource.APIResult;
 import org.apache.falcon.resource.InstancesResult;
 import org.apache.falcon.resource.InstancesSummaryResult;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.MediaType;
@@ -33,7 +34,7 @@ import javax.ws.rs.core.Response;
  */
 public class FalconWebException extends WebApplicationException {
 
-    private static final Logger LOG = Logger.getLogger(FalconWebException.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FalconWebException.class);
 
     public static FalconWebException newException(Throwable e,
                                                   Response.Status status) {
@@ -49,28 +50,28 @@ public class FalconWebException extends WebApplicationException {
     public static FalconWebException newInstanceSummaryException(Throwable e, Response.Status status) {
         LOG.error("Failure reason", e);
         String message = e.getMessage() + "\n" + getAddnInfo(e);
-        LOG.error("Action failed: " + status + "\nError:" + message);
+        LOG.error("Action failed: {}\nError: {}", status, message);
         APIResult result = new InstancesSummaryResult(APIResult.Status.FAILED, message);
         return new FalconWebException(Response.status(status).entity(result).type(MediaType.TEXT_XML_TYPE).build());
     }
 
     public static FalconWebException newException(APIResult result,
                                                   Response.Status status) {
-        LOG.error("Action failed: " + status + "\nError:" + result.getMessage());
+        LOG.error("Action failed: {}\nError: {}", status, result.getMessage());
         return new FalconWebException(Response.status(status).
                 entity(result).type(MediaType.TEXT_XML_TYPE).build());
     }
 
     public static FalconWebException newException(String message,
                                                   Response.Status status) {
-        LOG.error("Action failed: " + status + "\nError:" + message);
+        LOG.error("Action failed: {}\nError: {}", status, message);
         APIResult result = new APIResult(APIResult.Status.FAILED, message);
         return new FalconWebException(Response.status(status).
                 entity(result).type(MediaType.TEXT_XML_TYPE).build());
     }
 
     public static FalconWebException newInstanceException(String message, Response.Status status) {
-        LOG.error("Action failed: " + status + "\nError:" + message);
+        LOG.error("Action failed: {}\nError: {}", status, message);
         APIResult result = new InstancesResult(APIResult.Status.FAILED, message);
         return new FalconWebException(Response.status(status).entity(result).type(MediaType.TEXT_XML_TYPE).build());
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/Main.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/Main.java b/prism/src/main/java/org/apache/falcon/Main.java
index a425a72..28a3c06 100644
--- a/prism/src/main/java/org/apache/falcon/Main.java
+++ b/prism/src/main/java/org/apache/falcon/Main.java
@@ -26,14 +26,15 @@ import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.falcon.util.BuildProperties;
 import org.apache.falcon.util.EmbeddedServer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
 
 /**
  * Driver for running Falcon as a standalone server with embedded jetty server.
  */
 public final class Main {
-    private static final Logger LOG = Logger.getLogger(Main.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Main.class);
     private static final String APP_PATH = "app";
     private static final String APP_PORT = "port";
 
@@ -76,7 +77,7 @@ public final class Main {
         if (startActiveMq) {
             String dataDir = System.getProperty("falcon.embeddedmq.data", "target/");
             int mqport = Integer.valueOf(System.getProperty("falcon.embeddedmq.port", "61616"));
-            LOG.info("Starting activemq at port " + mqport + " with data dir " + dataDir);
+            LOG.info("Starting ActiveMQ at port {} with data dir {}", mqport, dataDir);
 
             BrokerService broker = new BrokerService();
             broker.setUseJmx(false);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/listener/ContextStartupListener.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/listener/ContextStartupListener.java b/prism/src/main/java/org/apache/falcon/listener/ContextStartupListener.java
index f22d831..5e879b1 100644
--- a/prism/src/main/java/org/apache/falcon/listener/ContextStartupListener.java
+++ b/prism/src/main/java/org/apache/falcon/listener/ContextStartupListener.java
@@ -24,7 +24,8 @@ import org.apache.falcon.service.ServiceInitializer;
 import org.apache.falcon.util.BuildProperties;
 import org.apache.falcon.util.RuntimeProperties;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.ServletContextEvent;
 import javax.servlet.ServletContextListener;
@@ -36,7 +37,7 @@ import java.util.Properties;
  */
 public class ContextStartupListener implements ServletContextListener {
 
-    private static final Logger LOG = Logger.getLogger(ContextStartupListener.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ContextStartupListener.class);
 
     private final ServiceInitializer startupServices = new ServiceInitializer();
 
@@ -76,7 +77,7 @@ public class ContextStartupListener implements ServletContextListener {
         }
         buffer.append("\n############################################");
         buffer.append("############################################");
-        LOG.info(buffer);
+        LOG.info(buffer.toString());
     }
 
     @Override
@@ -90,6 +91,6 @@ public class ContextStartupListener implements ServletContextListener {
         buffer.append("\n############################################");
         buffer.append("\n         Falcon Server (SHUTDOWN)            ");
         buffer.append("\n############################################");
-        LOG.info(buffer);
+        LOG.info(buffer.toString());
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/plugin/ChainableMonitoringPlugin.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/plugin/ChainableMonitoringPlugin.java b/prism/src/main/java/org/apache/falcon/plugin/ChainableMonitoringPlugin.java
index 72e0302..c695bb7 100644
--- a/prism/src/main/java/org/apache/falcon/plugin/ChainableMonitoringPlugin.java
+++ b/prism/src/main/java/org/apache/falcon/plugin/ChainableMonitoringPlugin.java
@@ -23,8 +23,9 @@ import org.apache.falcon.aspect.AbstractFalconAspect;
 import org.apache.falcon.aspect.ResourceMessage;
 import org.apache.falcon.util.ReflectionUtils;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
 import org.aspectj.lang.annotation.Aspect;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -36,7 +37,7 @@ import java.util.List;
  */
 @Aspect
 public class ChainableMonitoringPlugin extends AbstractFalconAspect implements MonitoringPlugin {
-    private static final Logger LOG = Logger.getLogger(ChainableMonitoringPlugin.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ChainableMonitoringPlugin.class);
 
     private List<MonitoringPlugin> plugins = new ArrayList<MonitoringPlugin>();
 
@@ -47,11 +48,11 @@ public class ChainableMonitoringPlugin extends AbstractFalconAspect implements M
             for (String pluginClass : pluginClasses.split(",")) {
                 MonitoringPlugin plugin = ReflectionUtils.getInstanceByClassName(pluginClass.trim());
                 plugins.add(plugin);
-                LOG.info("Registered Monitoring Plugin " + pluginClass);
+                LOG.info("Registered Monitoring Plugin {}", pluginClass);
             }
         } catch (FalconException e) {
             plugins = Arrays.asList((MonitoringPlugin) new LoggingPlugin());
-            LOG.error("Unable to initialize monitoring plugins: " + pluginClasses, e);
+            LOG.error("Unable to initialize monitoring plugins: {}", pluginClasses, e);
         }
     }
 
@@ -61,7 +62,7 @@ public class ChainableMonitoringPlugin extends AbstractFalconAspect implements M
             try {
                 plugin.monitor(message);
             } catch (Exception e) {
-                LOG.debug("Unable to publish message to " + plugin.getClass(), e);
+                LOG.debug("Unable to publish message to {}", plugin.getClass(), e);
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java b/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java
index e6f0709..c135470 100644
--- a/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java
+++ b/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java
@@ -40,8 +40,9 @@ import org.apache.falcon.util.RuntimeProperties;
 import org.apache.falcon.workflow.WorkflowEngineFactory;
 import org.apache.falcon.workflow.engine.AbstractWorkflowEngine;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.log4j.Logger;
 import org.datanucleus.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.ws.rs.core.Response;
@@ -53,8 +54,8 @@ import java.util.*;
  * A base class for managing Entity operations.
  */
 public abstract class AbstractEntityManager {
-    private static final Logger LOG = Logger.getLogger(AbstractEntityManager.class);
-    private static final Logger AUDIT = Logger.getLogger("AUDIT");
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractEntityManager.class);
+    private static final Logger AUDIT = LoggerFactory.getLogger("AUDIT");
     protected static final int XML_DEBUG_LEN = 10 * 1024;
 
     private AbstractWorkflowEngine workflowEngine;
@@ -173,7 +174,7 @@ public abstract class AbstractEntityManager {
             return new APIResult(APIResult.Status.SUCCEEDED,
                     "Validated successfully (" + entityType + ") " + entity.getName());
         } catch (Throwable e) {
-            LOG.error("Validation failed for entity (" + type + ") ", e);
+            LOG.error("Validation failed for entity ({})", type, e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
         }
     }
@@ -210,7 +211,7 @@ public abstract class AbstractEntityManager {
             return new APIResult(APIResult.Status.SUCCEEDED,
                     entity + "(" + type + ") removed successfully " + removedFromEngine);
         } catch (Throwable e) {
-            LOG.error("Unable to reach workflow engine for deletion or " + "deletion failed", e);
+            LOG.error("Unable to reach workflow engine for deletion or deletion failed", e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
         }
     }
@@ -257,7 +258,7 @@ public abstract class AbstractEntityManager {
             return new APIResult(APIResult.Status.SUCCEEDED, entityName + " updated successfully"
                     + (effectiveTimes.isEmpty() ? "" : " with effect from " + effectiveTimes));
         } catch (Throwable e) {
-            LOG.error("Updation failed", e);
+            LOG.error("Update failed", e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
         } finally {
             ConfigurationStore.get().cleanupUpdateInit();
@@ -330,7 +331,7 @@ public abstract class AbstractEntityManager {
 
         validate(entity);
         configStore.publish(entityType, entity);
-        LOG.info("Submit successful: (" + type + ")" + entity.getName());
+        LOG.info("Submit successful: ({}): {}", type, entity.getName());
         return entity;
     }
 
@@ -349,7 +350,7 @@ public abstract class AbstractEntityManager {
                 try {
                     xmlStream.reset();
                     String xmlData = getAsString(xmlStream);
-                    LOG.debug("XML DUMP for (" + entityType + "): " + xmlData, e);
+                    LOG.debug("XML DUMP for ({}): {}", entityType, xmlData, e);
                 } catch (IOException ignore) {
                     // ignore
                 }
@@ -374,8 +375,8 @@ public abstract class AbstractEntityManager {
         if (request == null) {
             return; // this must be internal call from Falcon
         }
-        AUDIT.info("Performed " + action + " on " + entity + "(" + type + ") :: " + request.getRemoteHost() + "/"
-                + CurrentUser.getUser());
+        AUDIT.info("Performed {} on {} ({}) :: {}/{}",
+                action, entity, type, request.getRemoteHost(), CurrentUser.getUser());
     }
 
     private enum EntityStatus {
@@ -402,7 +403,7 @@ public abstract class AbstractEntityManager {
             throw e;
         } catch (Exception e) {
 
-            LOG.error("Unable to get status for entity " + entity + "(" + type + ")", e);
+            LOG.error("Unable to get status for entity {} ({})", entity, type, e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
         }
     }
@@ -441,7 +442,7 @@ public abstract class AbstractEntityManager {
             Entity[] dependentEntities = dependents.toArray(new Entity[dependents.size()]);
             return new EntityList(dependentEntities, entityObj);
         } catch (Exception e) {
-            LOG.error("Unable to get dependencies for entityName " + entityName + "(" + type + ")", e);
+            LOG.error("Unable to get dependencies for entityName {} ({})", entityName, type, e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
         }
     }
@@ -492,7 +493,7 @@ public abstract class AbstractEntityManager {
             }
             return new EntityList(elements);
         } catch (Exception e) {
-            LOG.error("Unable to get list for entities for (" + type + ")", e);
+            LOG.error("Unable to get list for entities for ({})", type, e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
         }
     }
@@ -513,7 +514,7 @@ public abstract class AbstractEntityManager {
             }
             return entity.toString();
         } catch (Throwable e) {
-            LOG.error("Unable to get entity definition from config " + "store for (" + type + ") " + entityName, e);
+            LOG.error("Unable to get entity definition from config store for ({}): {}", type, entityName, e);
             throw FalconWebException.newException(e, Response.Status.BAD_REQUEST);
 
         }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/resource/AbstractInstanceManager.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/AbstractInstanceManager.java b/prism/src/main/java/org/apache/falcon/resource/AbstractInstanceManager.java
index 07ab2bc..0bdf94b 100644
--- a/prism/src/main/java/org/apache/falcon/resource/AbstractInstanceManager.java
+++ b/prism/src/main/java/org/apache/falcon/resource/AbstractInstanceManager.java
@@ -30,7 +30,8 @@ import org.apache.falcon.entity.v0.SchemaHelper;
 import org.apache.falcon.logging.LogProvider;
 import org.apache.falcon.resource.InstancesResult.Instance;
 import org.apache.falcon.workflow.engine.AbstractWorkflowEngine;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.ServletInputStream;
 import javax.servlet.http.HttpServletRequest;
@@ -44,7 +45,7 @@ import java.util.Set;
  * A base class for managing Entity's Instance operations.
  */
 public abstract class AbstractInstanceManager extends AbstractEntityManager {
-    private static final Logger LOG = Logger.getLogger(AbstractInstanceManager.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractInstanceManager.class);
 
     protected void checkType(String type) {
         if (StringUtils.isEmpty(type)) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/resource/AbstractSchedulableEntityManager.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/AbstractSchedulableEntityManager.java b/prism/src/main/java/org/apache/falcon/resource/AbstractSchedulableEntityManager.java
index cc4446c..f98aece 100644
--- a/prism/src/main/java/org/apache/falcon/resource/AbstractSchedulableEntityManager.java
+++ b/prism/src/main/java/org/apache/falcon/resource/AbstractSchedulableEntityManager.java
@@ -25,7 +25,8 @@ import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.UnschedulableEntityException;
 import org.apache.falcon.monitors.Dimension;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.ws.rs.PathParam;
@@ -38,7 +39,7 @@ import javax.ws.rs.core.Response;
  */
 public abstract class AbstractSchedulableEntityManager extends AbstractEntityManager {
 
-    private static final Logger LOG = Logger.getLogger(AbstractSchedulableEntityManager.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AbstractSchedulableEntityManager.class);
 
     /**
      * Schedules an submitted entity immediately.

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java b/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java
index f2b8874..20400fc 100644
--- a/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java
+++ b/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java
@@ -26,7 +26,8 @@ import org.apache.falcon.resource.proxy.BufferedRequest;
 import org.apache.falcon.security.CurrentUser;
 import org.apache.falcon.util.DeploymentProperties;
 import org.apache.falcon.util.RuntimeProperties;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.ws.rs.Consumes;
@@ -49,7 +50,7 @@ import java.util.Properties;
  * A Channel implementation for HTTP.
  */
 public class HTTPChannel extends AbstractChannel {
-    private static final Logger LOG = Logger.getLogger(HTTPChannel.class);
+    private static final Logger LOG = LoggerFactory.getLogger(HTTPChannel.class);
 
     private static final HttpServletRequest DEFAULT_NULL_REQUEST = new NullServletRequest();
 
@@ -66,7 +67,7 @@ public class HTTPChannel extends AbstractChannel {
         try {
             String proxyClassName = DEPLOYMENT_PROPERTIES.getProperty(serviceName + ".proxy");
             service = Class.forName(proxyClassName);
-            LOG.info("Service: " + serviceName + ", url = " + urlPrefix);
+            LOG.info("Service: {}, url = {}", serviceName, urlPrefix);
         } catch (Exception e) {
             throw new FalconException("Unable to initialize channel for " + serviceName, e);
         }
@@ -78,7 +79,7 @@ public class HTTPChannel extends AbstractChannel {
         try {
             Method method = getMethod(service, methodName, args);
             String url = urlPrefix + "/" + pathValue(method, args);
-            LOG.debug("Executing " + url);
+            LOG.debug("Executing {}", url);
 
             HttpServletRequest incomingRequest = getIncomingRequest(args);
             incomingRequest.getInputStream().reset();
@@ -100,10 +101,10 @@ public class HTTPChannel extends AbstractChannel {
                 return (T) response.getEntity(method.getReturnType());
             } else if (response.getClientResponseStatus().getStatusCode()
                     == Response.Status.BAD_REQUEST.getStatusCode()) {
-                LOG.error("Request failed: " + response.getClientResponseStatus().getStatusCode());
+                LOG.error("Request failed: {}", response.getClientResponseStatus().getStatusCode());
                 return (T) response.getEntity(method.getReturnType());
             } else {
-                LOG.error("Request failed: " + response.getClientResponseStatus().getStatusCode());
+                LOG.error("Request failed: {}", response.getClientResponseStatus().getStatusCode());
                 throw new FalconException(response.getEntity(String.class));
             }
         } catch (Throwable e) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/resource/channel/IPCChannel.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/channel/IPCChannel.java b/prism/src/main/java/org/apache/falcon/resource/channel/IPCChannel.java
index bbdaf40..d046ad4 100644
--- a/prism/src/main/java/org/apache/falcon/resource/channel/IPCChannel.java
+++ b/prism/src/main/java/org/apache/falcon/resource/channel/IPCChannel.java
@@ -23,7 +23,8 @@ import org.apache.falcon.FalconRuntimException;
 import org.apache.falcon.FalconWebException;
 import org.apache.falcon.resource.AbstractEntityManager;
 import org.apache.falcon.util.ReflectionUtils;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.lang.reflect.Method;
 
@@ -31,7 +32,7 @@ import java.lang.reflect.Method;
  * Inter-process implementation of a Channel.
  */
 public class IPCChannel extends AbstractChannel {
-    private static final Logger LOG = Logger.getLogger(IPCChannel.class);
+    private static final Logger LOG = LoggerFactory.getLogger(IPCChannel.class);
     private AbstractEntityManager service;
 
     public void init(String ignoreColo, String serviceName) throws FalconException {
@@ -41,7 +42,7 @@ public class IPCChannel extends AbstractChannel {
     @SuppressWarnings("unchecked")
     @Override
     public <T> T invoke(String methodName, Object... args) throws FalconException {
-        LOG.debug("Invoking method " + methodName + " on service " + service.getClass().getName());
+        LOG.debug("Invoking method {} on service {}", methodName, service.getClass().getName());
         Method method = getMethod(service.getClass(), methodName, args);
 
         try {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/resource/metadata/LineageMetadataResource.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/metadata/LineageMetadataResource.java b/prism/src/main/java/org/apache/falcon/resource/metadata/LineageMetadataResource.java
index a1aeca5..93b4c04 100644
--- a/prism/src/main/java/org/apache/falcon/resource/metadata/LineageMetadataResource.java
+++ b/prism/src/main/java/org/apache/falcon/resource/metadata/LineageMetadataResource.java
@@ -33,10 +33,11 @@ import org.apache.falcon.metadata.RelationshipProperty;
 import org.apache.falcon.metadata.RelationshipType;
 import org.apache.falcon.service.Services;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
@@ -59,7 +60,7 @@ import java.util.Set;
 @Path("graphs/lineage")
 public class LineageMetadataResource {
 
-    private static final Logger LOG = Logger.getLogger(LineageMetadataResource.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LineageMetadataResource.class);
 
     public static final String RESULTS = "results";
     public static final String TOTAL_SIZE = "totalSize";
@@ -99,7 +100,7 @@ public class LineageMetadataResource {
         checkIfMetadataMappingServiceIsEnabled();
         String file = StartupProperties.get().getProperty("falcon.graph.serialize.path")
                 + "/lineage-graph-" + System.currentTimeMillis() + ".json";
-        LOG.info("Serialize Graph to: " + file);
+        LOG.info("Serialize Graph to: {}", file);
         try {
             GraphUtils.dump(getGraph(), file);
             return Response.ok().build();
@@ -141,7 +142,7 @@ public class LineageMetadataResource {
     @Produces({MediaType.APPLICATION_JSON})
     public Response getVertex(@PathParam("id") final String vertexId) {
         checkIfMetadataMappingServiceIsEnabled();
-        LOG.info("Get vertex for vertexId= " + vertexId);
+        LOG.info("Get vertex for vertexId= {}", vertexId);
         try {
             Vertex vertex = findVertex(vertexId);
 
@@ -180,7 +181,7 @@ public class LineageMetadataResource {
                                         @DefaultValue("false") @QueryParam("relationships")
                                         final String relationships) {
         checkIfMetadataMappingServiceIsEnabled();
-        LOG.info("Get vertex for vertexId= " + vertexId);
+        LOG.info("Get vertex for vertexId= {}", vertexId);
         try {
             Vertex vertex = findVertex(vertexId);
 
@@ -279,7 +280,7 @@ public class LineageMetadataResource {
     public Response getVertices(@QueryParam("key") final String key,
                                 @QueryParam("value") final String value) {
         checkIfMetadataMappingServiceIsEnabled();
-        LOG.info("Get vertices for property key= " + key + ", value= " + value);
+        LOG.info("Get vertices for property key= {}, value= {}", key, value);
         try {
             JSONObject response = buildJSONResponse(getGraph().getVertices(key, value));
             return Response.ok(response).build();
@@ -303,7 +304,7 @@ public class LineageMetadataResource {
     public Response getVertexEdges(@PathParam("id") String vertexId,
                                    @PathParam("direction") String direction) {
         checkIfMetadataMappingServiceIsEnabled();
-        LOG.info("Get vertex edges for vertexId= " + vertexId + ", direction= " + direction);
+        LOG.info("Get vertex edges for vertexId= {}, direction= {}", vertexId, direction);
         try {
             Vertex vertex = findVertex(vertexId);
 
@@ -392,7 +393,7 @@ public class LineageMetadataResource {
     @Produces({MediaType.APPLICATION_JSON})
     public Response getEdge(@PathParam("id") final String edgeId) {
         checkIfMetadataMappingServiceIsEnabled();
-        LOG.info("Get vertex for edgeId= " + edgeId);
+        LOG.info("Get vertex for edgeId= {}", edgeId);
         try {
             Edge edge = getGraph().getEdge(edgeId);
             if (edge == null) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java b/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java
index 52ede1d..5a56b9a 100644
--- a/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java
+++ b/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java
@@ -23,8 +23,9 @@ import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
-import org.apache.log4j.Logger;
 import org.apache.log4j.NDC;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
@@ -49,7 +50,7 @@ import java.util.UUID;
  */
 public class BasicAuthFilter extends AuthenticationFilter {
 
-    private static final Logger LOG = Logger.getLogger(BasicAuthFilter.class);
+    private static final Logger LOG = LoggerFactory.getLogger(BasicAuthFilter.class);
 
     /**
      * Constant for the configuration property that indicates the prefix.
@@ -178,7 +179,7 @@ public class BasicAuthFilter extends AuthenticationFilter {
                             NDC.push(user + ":" + httpRequest.getMethod() + "/" + httpRequest.getPathInfo());
                             NDC.push(requestId);
                             CurrentUser.authenticate(user);
-                            LOG.info("Request from user: " + user + ", URL=" + getRequestUrl(httpRequest));
+                            LOG.info("Request from user: {}, URL={}", user, getRequestUrl(httpRequest));
 
                             filterChain.doFilter(servletRequest, servletResponse);
                         } finally {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java b/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java
index 537a999..35e0ba3 100644
--- a/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java
+++ b/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java
@@ -31,7 +31,8 @@ import org.apache.falcon.resource.InstancesResult;
 import org.apache.falcon.security.CurrentUser;
 import org.apache.falcon.workflow.WorkflowEngineFactory;
 import org.apache.falcon.workflow.engine.AbstractWorkflowEngine;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.jms.*;
 import java.lang.reflect.InvocationTargetException;
@@ -41,7 +42,7 @@ import java.util.Date;
  * Subscribes to the falcon topic for handling retries and alerts.
  */
 public class FalconTopicSubscriber implements MessageListener, ExceptionListener {
-    private static final Logger LOG = Logger.getLogger(FalconTopicSubscriber.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FalconTopicSubscriber.class);
 
     private final String implementation;
     private final String userName;
@@ -75,7 +76,7 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
             connection.setExceptionListener(this);
             connection.start();
         } catch (Exception e) {
-            LOG.error("Error starting subscriber of topic: " + this.toString(), e);
+            LOG.error("Error starting subscriber of topic: {}", this, e);
             throw new FalconException(e);
         }
     }
@@ -119,8 +120,8 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
                         nominalTime, runId, workflowId, workflowUser,
                         System.currentTimeMillis());
                 } else {
-                    LOG.info("Late data handling not applicable for entityType: " + entityType + ", entityName: "
-                        + entityName + " operation: " + operation);
+                    LOG.info("Late data handling not applicable for entityType: {}, entityName: {} operation: {}",
+                            entityType, entityName, operation);
                 }
                 GenericAlert.instrumentSucceededInstance(cluster, entityType,
                     entityName, nominalTime, workflowId, workflowUser, runId, operation,
@@ -129,11 +130,11 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
                 notifyMetadataMappingService(entityName, operation, mapMessage.getString(ARG.logDir.getArgName()));
             }
         } catch (JMSException e) {
-            LOG.info("Error in onMessage for subscriber of topic: " + this.toString(), e);
+            LOG.info("Error in onMessage for subscriber of topic: {}", this, e);
         } catch (FalconException e) {
-            LOG.info("Error in onMessage for subscriber of topic: " + this.toString(), e);
+            LOG.info("Error in onMessage for subscriber of topic: {}", this, e);
         } catch (Exception e) {
-            LOG.info("Error in onMessage for subscriber of topic: " + this.toString(), e);
+            LOG.info("Error in onMessage for subscriber of topic: {}", this, e);
         }
     }
 
@@ -153,17 +154,17 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
                 .append(mapMessage.getString(arg.getArgName())).append(", ");
         }
         buff.append("}");
-        LOG.debug(buff);
+        LOG.debug(buff.toString());
     }
 
     @Override
     public void onException(JMSException ignore) {
-        LOG.info("Error in onException for subscriber of topic: " + this.toString(), ignore);
+        LOG.info("Error in onException for subscriber of topic: {}", this, ignore);
     }
 
     public void closeSubscriber() throws FalconException {
         try {
-            LOG.info("Closing subscriber on topic : " + this.topicName);
+            LOG.info("Closing subscriber on topic: {}", this.topicName);
             if (subscriber != null) {
                 subscriber.close();
             }
@@ -171,7 +172,7 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
                 connection.close();
             }
         } catch (JMSException e) {
-            LOG.error("Error closing subscriber of topic: " + this.toString(), e);
+            LOG.error("Error closing subscriber of topic: {}", this, e);
             throw new FalconException(e);
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/prism/src/test/java/org/apache/falcon/aspect/LoggingAspectTest.java
----------------------------------------------------------------------
diff --git a/prism/src/test/java/org/apache/falcon/aspect/LoggingAspectTest.java b/prism/src/test/java/org/apache/falcon/aspect/LoggingAspectTest.java
index 547cb6d..ee60f4f 100644
--- a/prism/src/test/java/org/apache/falcon/aspect/LoggingAspectTest.java
+++ b/prism/src/test/java/org/apache/falcon/aspect/LoggingAspectTest.java
@@ -49,7 +49,7 @@ public class LoggingAspectTest {
         } catch (Exception e) {
             return;
         }
-        Assert.fail("Exepected excpetion");
+        Assert.fail("Expected exception");
     }
 
     @Test(expectedExceptions = FalconWebException.class)

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/process/src/main/java/org/apache/falcon/workflow/OozieProcessWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/process/src/main/java/org/apache/falcon/workflow/OozieProcessWorkflowBuilder.java b/process/src/main/java/org/apache/falcon/workflow/OozieProcessWorkflowBuilder.java
index edfe5a8..3d6bf7b 100644
--- a/process/src/main/java/org/apache/falcon/workflow/OozieProcessWorkflowBuilder.java
+++ b/process/src/main/java/org/apache/falcon/workflow/OozieProcessWorkflowBuilder.java
@@ -65,9 +65,10 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.log4j.Logger;
 import org.apache.oozie.client.CoordinatorJob.Timeunit;
 import org.apache.oozie.client.OozieClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.xml.bind.JAXBElement;
 import java.io.IOException;
@@ -86,7 +87,7 @@ import java.util.Set;
  * Oozie workflow builder for falcon entities.
  */
 public class OozieProcessWorkflowBuilder extends OozieWorkflowBuilder<Process> {
-    private static final Logger LOG = Logger.getLogger(OozieProcessWorkflowBuilder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(OozieProcessWorkflowBuilder.class);
 
     private static final Set<String> FALCON_PROCESS_HIVE_ACTIONS = new HashSet<String>(
             Arrays.asList(new String[]{"recordsize", "user-oozie-workflow", "user-pig-job", "user-hive-job", }));
@@ -102,7 +103,7 @@ public class OozieProcessWorkflowBuilder extends OozieWorkflowBuilder<Process> {
         for (String clusterName : clusters) {
             org.apache.falcon.entity.v0.process.Cluster processCluster = ProcessHelper.getCluster(entity, clusterName);
             if (processCluster.getValidity().getStart().compareTo(processCluster.getValidity().getEnd()) >= 0) {
-                LOG.info("process validity start <= end for cluster " + clusterName + ". Skipping schedule");
+                LOG.info("process validity start >= end for cluster {}. Skipping schedule", clusterName);
                 break;
             }