Posted to commits@falcon.apache.org by sh...@apache.org on 2014/05/21 09:21:03 UTC

[3/4] git commit: FALCON-133 Upgrade to slf4j 1.7.5 and use SLF4J logger. Contributed by Jean-Baptiste Onofré

FALCON-133 Upgrade to slf4j 1.7.5 and use SLF4J logger. Contributed by Jean-Baptiste Onofré


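The change is the same mechanical substitution across every file listed below: the log4j import and Logger.getLogger() factory call become their SLF4J equivalents, and string-concatenated log messages become parameterized messages with {} placeholders. A minimal before/after sketch of the pattern (the class name is illustrative, not part of the patch):

    // Before: log4j 1.x
    import org.apache.log4j.Logger;

    public class ExampleService {
        private static final Logger LOG = Logger.getLogger(ExampleService.class);

        void process(String name) {
            LOG.info("Processing entity: " + name);   // message string built eagerly via concatenation
        }
    }

    // After: SLF4J API
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleService {
        private static final Logger LOG = LoggerFactory.getLogger(ExampleService.class);

        void process(String name) {
            LOG.info("Processing entity: {}", name);  // {} substituted only if INFO is enabled
        }
    }
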
Project: http://git-wip-us.apache.org/repos/asf/incubator-falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-falcon/commit/b036d740
Tree: http://git-wip-us.apache.org/repos/asf/incubator-falcon/tree/b036d740
Diff: http://git-wip-us.apache.org/repos/asf/incubator-falcon/diff/b036d740

Branch: refs/heads/master
Commit: b036d740a4dc13bb5e8102dde71519229f202506
Parents: d2ac5b6
Author: Shwetha GS <sh...@inmobi.com>
Authored: Wed May 21 12:44:35 2014 +0530
Committer: Shwetha GS <sh...@inmobi.com>
Committed: Wed May 21 12:48:12 2014 +0530

----------------------------------------------------------------------
 CHANGES.txt                                     |   1 +
 addons/designer/pom.xml                         |  10 +-
 client/pom.xml                                  |   4 +
 .../falcon/catalog/HiveCatalogService.java      |  21 ++--
 .../falcon/cleanup/AbstractCleanupHandler.java  |  19 ++--
 .../falcon/cleanup/FeedCleanupHandler.java      |   8 +-
 .../falcon/cleanup/ProcessCleanupHandler.java   |   8 +-
 .../entity/parser/ClusterEntityParser.java      |  19 ++--
 .../falcon/entity/parser/EntityParser.java      |   7 +-
 .../falcon/entity/parser/FeedEntityParser.java  |   9 +-
 .../falcon/entity/store/ConfigurationStore.java |  17 +--
 .../apache/falcon/entity/v0/EntityGraph.java    |   9 +-
 .../EntityRelationshipGraphBuilder.java         |  27 ++---
 .../org/apache/falcon/metadata/GraphUtils.java  |   9 +-
 .../InstanceRelationshipGraphBuilder.java       |  17 +--
 .../apache/falcon/metadata/LineageRecorder.java |  11 +-
 .../falcon/metadata/MetadataMappingService.java |  23 +++--
 .../metadata/RelationshipGraphBuilder.java      |  23 ++---
 .../AuthenticationInitializationService.java    |   8 +-
 .../org/apache/falcon/security/CurrentUser.java |   7 +-
 .../falcon/service/LogCleanupService.java       |   7 +-
 .../falcon/service/ServiceInitializer.java      |  17 +--
 .../org/apache/falcon/update/UpdateHelper.java  |  15 +--
 .../falcon/util/ApplicationProperties.java      |  15 +--
 .../org/apache/falcon/util/DeploymentUtil.java  |   9 +-
 .../apache/falcon/util/RuntimeProperties.java   |   5 +-
 .../entity/store/ConfigurationStoreTest.java    |   7 +-
 .../workflow/OozieFeedWorkflowBuilder.java      |  15 +--
 .../falcon/listener/HadoopStartupListener.java  |   7 +-
 .../falcon/messaging/EntityInstanceMessage.java |  17 +--
 .../falcon/messaging/MessageProducer.java       |  16 +--
 metrics/pom.xml                                 |   4 +-
 .../falcon/aspect/AbstractFalconAspect.java     |   8 +-
 .../org/apache/falcon/plugin/LoggingPlugin.java |   7 +-
 oozie-el-extensions/pom.xml                     |   9 +-
 .../falcon/logging/DefaultTaskLogRetriever.java |   9 +-
 .../org/apache/falcon/logging/LogMover.java     |  19 ++--
 .../org/apache/falcon/logging/LogProvider.java  |   7 +-
 .../service/SharedLibraryHostingService.java    |  11 +-
 .../falcon/workflow/FalconPostProcessing.java   |  13 +--
 .../falcon/workflow/OozieWorkflowBuilder.java   |  15 +--
 .../workflow/engine/OozieClientFactory.java     |   7 +-
 .../engine/OozieHouseKeepingService.java        |   7 +-
 .../workflow/engine/OozieWorkflowEngine.java    | 102 +++++++++---------
 .../apache/oozie/client/ProxyOozieClient.java   |  13 +--
 .../falcon/logging/v1/TaskLogRetrieverV1.java   |  12 +--
 pom.xml                                         |  10 +-
 .../org/apache/falcon/FalconWebException.java   |  13 +--
 prism/src/main/java/org/apache/falcon/Main.java |   7 +-
 .../falcon/listener/ContextStartupListener.java |   9 +-
 .../plugin/ChainableMonitoringPlugin.java       |  11 +-
 .../falcon/resource/AbstractEntityManager.java  |  29 +++---
 .../resource/AbstractInstanceManager.java       |   5 +-
 .../AbstractSchedulableEntityManager.java       |   5 +-
 .../falcon/resource/channel/HTTPChannel.java    |  13 +--
 .../falcon/resource/channel/IPCChannel.java     |   7 +-
 .../metadata/LineageMetadataResource.java       |  17 +--
 .../apache/falcon/security/BasicAuthFilter.java |   7 +-
 .../falcon/service/FalconTopicSubscriber.java   |  25 ++---
 .../apache/falcon/aspect/LoggingAspectTest.java |   2 +-
 .../workflow/OozieProcessWorkflowBuilder.java   |   7 +-
 .../falcon/replication/CustomReplicator.java    |  12 +--
 .../falcon/replication/FeedReplicator.java      |  13 ++-
 .../falcon/replication/FilteredCopyListing.java |   9 +-
 .../apache/falcon/latedata/LateDataHandler.java |  16 +--
 .../rerun/handler/AbstractRerunConsumer.java    |  10 +-
 .../rerun/handler/AbstractRerunHandler.java     |   6 +-
 .../falcon/rerun/handler/LateRerunConsumer.java |  29 +++---
 .../falcon/rerun/handler/LateRerunHandler.java  |  36 +++----
 .../falcon/rerun/handler/RetryConsumer.java     |  35 ++-----
 .../falcon/rerun/handler/RetryHandler.java      |  12 +--
 .../apache/falcon/rerun/queue/ActiveMQueue.java |  26 ++---
 .../apache/falcon/rerun/queue/DelayedQueue.java |   5 +-
 .../falcon/rerun/queue/InMemoryQueue.java       |  22 ++--
 .../falcon/rerun/service/LateRunService.java    |   5 +-
 .../falcon/rerun/service/RetryService.java      |   5 +-
 .../apache/falcon/retention/FeedEvictor.java    | 103 +++++++++----------
 .../falcon/cluster/util/EmbeddedCluster.java    |   9 +-
 .../falcon/cluster/util/StandAloneCluster.java  |   7 +-
 79 files changed, 577 insertions(+), 580 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index 7496edd..df4b983 100755
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -7,6 +7,7 @@ Trunk (Unreleased)
   NEW FEATURES
 
   IMPROVEMENTS
+   FALCON-133 Upgrade to slf4j 1.7.5 and use SLF4J logger. (Jean-Baptiste Onofré via Shwetha GS)
 
   OPTIMIZATIONS
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/addons/designer/pom.xml
----------------------------------------------------------------------
diff --git a/addons/designer/pom.xml b/addons/designer/pom.xml
index 82f4d99..3e1a98a 100644
--- a/addons/designer/pom.xml
+++ b/addons/designer/pom.xml
@@ -96,7 +96,7 @@
 
         <include.prism>true</include.prism>
 
-        <slf4j.version>1.6.1</slf4j.version>
+        <slf4j.version>1.7.5</slf4j.version>
         <hive.version>0.11.0</hive.version>
         <hcatalog.version>0.11.0</hcatalog.version>
         <jetty.version>6.1.26</jetty.version>
@@ -178,12 +178,6 @@
             </dependency>
 
             <dependency>
-                <groupId>org.slf4j</groupId>
-                <artifactId>slf4j-simple</artifactId>
-                <version>${slf4j.version}</version>
-            </dependency>
-
-            <dependency>
                 <groupId>commons-lang</groupId>
                 <artifactId>commons-lang</artifactId>
                 <version>2.6</version>
@@ -192,7 +186,7 @@
             <dependency>
                 <groupId>log4j</groupId>
                 <artifactId>log4j</artifactId>
-                <version>1.2.15</version>
+                <version>1.2.17</version>
                 <scope>compile</scope>
                 <exclusions>
                     <exclusion>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/client/pom.xml
----------------------------------------------------------------------
diff --git a/client/pom.xml b/client/pom.xml
index 28e24b7..70e0db6 100644
--- a/client/pom.xml
+++ b/client/pom.xml
@@ -88,6 +88,10 @@
         </dependency>
 
         <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+        </dependency>
+        <dependency>
             <groupId>log4j</groupId>
             <artifactId>log4j</artifactId>
         </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java b/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java
index 30736f3..b5be4e1 100644
--- a/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java
+++ b/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java
@@ -31,7 +31,8 @@ import org.apache.hcatalog.api.HCatTable;
 import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
 import org.apache.hcatalog.common.HCatException;
 import org.apache.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
@@ -46,7 +47,7 @@ import java.util.concurrent.ConcurrentHashMap;
  */
 public class HiveCatalogService extends AbstractCatalogService {
 
-    private static final Logger LOG = Logger.getLogger(HiveCatalogService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(HiveCatalogService.class);
 
     private static final ConcurrentHashMap<String, HCatClient> CACHE = new ConcurrentHashMap<String, HCatClient>();
 
@@ -61,7 +62,7 @@ public class HiveCatalogService extends AbstractCatalogService {
 
         if (!CACHE.containsKey(metastoreUrl)) {
             HCatClient hCatClient = getHCatClient(metastoreUrl);
-            LOG.info("Caching HCatalog client object for " + metastoreUrl);
+            LOG.info("Caching HCatalog client object for {}", metastoreUrl);
             CACHE.putIfAbsent(metastoreUrl, hCatClient);
         }
 
@@ -101,7 +102,7 @@ public class HiveCatalogService extends AbstractCatalogService {
                     hcatConf.set(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname, "true");
                 }
 
-                LOG.info("Creating and caching HCatalog client object for " + catalogUrl);
+                LOG.info("Creating and caching HCatalog client object for {}", catalogUrl);
                 UserGroupInformation currentUser = UserGroupInformation.getLoginUser();
                 HCatClient hcatClient = currentUser.doAs(new PrivilegedExceptionAction<HCatClient>() {
                     public HCatClient run() throws Exception {
@@ -122,7 +123,7 @@ public class HiveCatalogService extends AbstractCatalogService {
     @Override
     public boolean isAlive(final String catalogUrl,
                            final String metaStorePrincipal) throws FalconException {
-        LOG.info("Checking if the service is alive for: " + catalogUrl);
+        LOG.info("Checking if the service is alive for: {}", catalogUrl);
 
         try {
             HCatClient client = getProxiedClient(catalogUrl, metaStorePrincipal);
@@ -136,7 +137,7 @@ public class HiveCatalogService extends AbstractCatalogService {
     @Override
     public boolean tableExists(final String catalogUrl, final String database, final String tableName,
                                final String metaStorePrincipal) throws FalconException {
-        LOG.info("Checking if the table exists: " + tableName);
+        LOG.info("Checking if the table exists: {}", tableName);
 
         try {
             HCatClient client = getProxiedClient(catalogUrl, metaStorePrincipal);
@@ -150,7 +151,7 @@ public class HiveCatalogService extends AbstractCatalogService {
     @Override
     public boolean isTableExternal(String catalogUrl, String database, String tableName)
         throws FalconException {
-        LOG.info("Checking if the table is external:" + tableName);
+        LOG.info("Checking if the table is external: {}", tableName);
 
         try {
             HCatClient client = get(catalogUrl);
@@ -165,7 +166,7 @@ public class HiveCatalogService extends AbstractCatalogService {
     public List<CatalogPartition> listPartitionsByFilter(String catalogUrl, String database,
                                                          String tableName, String filter)
         throws FalconException {
-        LOG.info("List partitions for : " + tableName + ", partition filter: " + filter);
+        LOG.info("List partitions for: {}, partition filter: {}", tableName, filter);
 
         try {
             List<CatalogPartition> catalogPartitionList = new ArrayList<CatalogPartition>();
@@ -209,7 +210,7 @@ public class HiveCatalogService extends AbstractCatalogService {
     public boolean dropPartitions(String catalogUrl, String database,
                                   String tableName, Map<String, String> partitions)
         throws FalconException {
-        LOG.info("Dropping partitions for : " + tableName + ", partitions: " + partitions);
+        LOG.info("Dropping partitions for: {}, partitions: {}", tableName, partitions);
 
         try {
             HCatClient client = get(catalogUrl);
@@ -224,7 +225,7 @@ public class HiveCatalogService extends AbstractCatalogService {
     @Override
     public CatalogPartition getPartition(String catalogUrl, String database, String tableName,
                                          Map<String, String> partitionSpec) throws FalconException {
-        LOG.info("Fetch partition for : " + tableName + ", partition spec: " + partitionSpec);
+        LOG.info("Fetch partition for: {}, partition spec: {}", tableName, partitionSpec);
 
         try {
             HCatClient client = get(catalogUrl);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java b/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java
index 20d46c3..ab85ae0 100644
--- a/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java
+++ b/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java
@@ -32,7 +32,8 @@ import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.jsp.el.ELException;
 import javax.servlet.jsp.el.ExpressionEvaluator;
@@ -44,7 +45,7 @@ import java.io.IOException;
  */
 public abstract class AbstractCleanupHandler {
 
-    protected static final Logger LOG = Logger.getLogger(AbstractCleanupHandler.class);
+    protected static final Logger LOG = LoggerFactory.getLogger(AbstractCleanupHandler.class);
 
     protected static final ConfigurationStore STORE = ConfigurationStore.get();
     public static final ExpressionEvaluator EVALUATOR = new ExpressionEvaluatorImpl();
@@ -99,7 +100,7 @@ public abstract class AbstractCleanupHandler {
     protected void delete(Cluster cluster, Entity entity, long retention, FileStatus[] logs)
         throws FalconException {
         if (logs == null || logs.length == 0) {
-            LOG.info("Nothing to delete for cluster: " + cluster.getName() + ", entity: " + entity.getName());
+            LOG.info("Nothing to delete for cluster: {}, entity: {}", cluster.getName(), entity.getName());
             return;
         }
 
@@ -110,9 +111,9 @@ public abstract class AbstractCleanupHandler {
                 try {
                     boolean isDeleted = getFileSystem(cluster).delete(log.getPath(), true);
                     if (!isDeleted) {
-                        LOG.error("Unable to delete path: " + log.getPath());
+                        LOG.error("Unable to delete path: {}", log.getPath());
                     } else {
-                        LOG.info("Deleted path: " + log.getPath());
+                        LOG.info("Deleted path: {}", log.getPath());
                     }
                     deleteParentIfEmpty(getFileSystem(cluster), log.getPath().getParent());
                 } catch (IOException e) {
@@ -121,10 +122,8 @@ public abstract class AbstractCleanupHandler {
                             + " for cluster: " + cluster.getName(), e);
                 }
             } else {
-                LOG.info("Retention limit: " + retention
-                        + " is less than modification"
-                        + (now - log.getModificationTime()) + " for path: "
-                        + log.getPath());
+                LOG.info("Retention limit: {} is less than modification {} for path: {}", retention,
+                        (now - log.getModificationTime()), log.getPath());
             }
         }
     }
@@ -132,7 +131,7 @@ public abstract class AbstractCleanupHandler {
     private void deleteParentIfEmpty(FileSystem fs, Path parent) throws IOException {
         FileStatus[] files = fs.listStatus(parent);
         if (files != null && files.length == 0) {
-            LOG.info("Parent path: " + parent + " is empty, deleting path");
+            LOG.info("Parent path: {} is empty, deleting path", parent);
             fs.delete(parent, true);
             deleteParentIfEmpty(fs, parent.getParent());
         }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java b/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java
index ce96eb3..452ab02 100644
--- a/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java
+++ b/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java
@@ -51,13 +51,13 @@ public class FeedCleanupHandler extends AbstractCleanupHandler {
                 Cluster currentCluster = STORE.get(EntityType.CLUSTER,
                         cluster.getName());
                 if (currentCluster.getColo().equals(getCurrentColo())) {
-                    LOG.info("Cleaning up logs & staged data for feed:" + feedName
-                            + " in  cluster: " + cluster.getName() + " with retention: " + retention);
+                    LOG.info("Cleaning up logs & staged data for feed: {} in cluster: {} with retention: {}", feedName,
+                            cluster.getName(), retention);
                     delete(currentCluster, feed, retention);
                     deleteStagedData(currentCluster, feed, retention);
                 } else {
-                    LOG.info("Ignoring cleanup for feed:" + feedName
-                            + " in  cluster: " + cluster.getName() + " as this does not belong to current colo");
+                    LOG.info("Ignoring cleanup for feed: {} in cluster: {} as this does not belong to current colo",
+                            feedName, cluster.getName());
                 }
             }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/cleanup/ProcessCleanupHandler.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/cleanup/ProcessCleanupHandler.java b/common/src/main/java/org/apache/falcon/cleanup/ProcessCleanupHandler.java
index add1237..e6ce72f 100644
--- a/common/src/main/java/org/apache/falcon/cleanup/ProcessCleanupHandler.java
+++ b/common/src/main/java/org/apache/falcon/cleanup/ProcessCleanupHandler.java
@@ -44,12 +44,12 @@ public class ProcessCleanupHandler extends AbstractCleanupHandler {
                 Cluster currentCluster = STORE.get(EntityType.CLUSTER,
                         cluster.getName());
                 if (currentCluster.getColo().equals(getCurrentColo())) {
-                    LOG.info("Cleaning up logs for process:" + processName
-                            + " in  cluster: " + cluster.getName() + " with retention: " + retention);
+                    LOG.info("Cleaning up logs for process: {} in cluster: {} with retention: {}",
+                            processName, cluster.getName(), retention);
                     delete(currentCluster, process, retention);
                 } else {
-                    LOG.info("Ignoring cleanup for process:" + processName
-                            + " in  cluster: " + cluster.getName() + " as this does not belong to current colo");
+                    LOG.info("Ignoring cleanup for process: {} in cluster: {} as this does not belong to current colo",
+                            processName, cluster.getName());
                 }
             }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java b/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java
index 831bfdc..584b867 100644
--- a/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java
+++ b/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java
@@ -39,14 +39,15 @@ import org.apache.falcon.workflow.WorkflowEngineFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Parser that parses cluster entity definition.
  */
 public class ClusterEntityParser extends EntityParser<Cluster> {
 
-    private static final Logger LOG = Logger.getLogger(ProcessEntityParser.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ProcessEntityParser.class);
 
     public ClusterEntityParser() {
         super(EntityType.CLUSTER);
@@ -87,14 +88,14 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
 
     private void validateReadInterface(Cluster cluster) throws ValidationException {
         final String readOnlyStorageUrl = ClusterHelper.getReadOnlyStorageUrl(cluster);
-        LOG.info("Validating read interface: " + readOnlyStorageUrl);
+        LOG.info("Validating read interface: {}", readOnlyStorageUrl);
 
         validateFileSystem(cluster, readOnlyStorageUrl);
     }
 
     private void validateWriteInterface(Cluster cluster) throws ValidationException {
         final String writeStorageUrl = ClusterHelper.getStorageUrl(cluster);
-        LOG.info("Validating write interface: " + writeStorageUrl);
+        LOG.info("Validating write interface: {}", writeStorageUrl);
 
         validateFileSystem(cluster, writeStorageUrl);
     }
@@ -123,7 +124,7 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
 
     private void validateExecuteInterface(Cluster cluster) throws ValidationException {
         String executeUrl = ClusterHelper.getMREndPoint(cluster);
-        LOG.info("Validating execute interface: " + executeUrl);
+        LOG.info("Validating execute interface: {}", executeUrl);
 
         try {
             HadoopClientFactory.validateJobClient(executeUrl);
@@ -134,7 +135,7 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
 
     private void validateWorkflowInterface(Cluster cluster) throws ValidationException {
         final String workflowUrl = ClusterHelper.getOozieUrl(cluster);
-        LOG.info("Validating workflow interface: " + workflowUrl);
+        LOG.info("Validating workflow interface: {}", workflowUrl);
 
         try {
             if (!WorkflowEngineFactory.getWorkflowEngine().isAlive(cluster)) {
@@ -149,7 +150,7 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
         final String messagingUrl = ClusterHelper.getMessageBrokerUrl(cluster);
         final String implementation = StartupProperties.get().getProperty(
                 "broker.impl.class", "org.apache.activemq.ActiveMQConnectionFactory");
-        LOG.info("Validating messaging interface: " + messagingUrl + ", implementation: " + implementation);
+        LOG.info("Validating messaging interface: {}, implementation: {}", messagingUrl, implementation);
 
         try {
             @SuppressWarnings("unchecked")
@@ -173,12 +174,12 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
         // continue validation only if a catalog service is provided
         final Interface catalogInterface = ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY);
         if (catalogInterface == null) {
-            LOG.info("Catalog service is not enabled for cluster: " + cluster.getName());
+            LOG.info("Catalog service is not enabled for cluster: {}", cluster.getName());
             return;
         }
 
         final String catalogUrl = catalogInterface.getEndpoint();
-        LOG.info("Validating catalog registry interface: " + catalogUrl);
+        LOG.info("Validating catalog registry interface: {}", catalogUrl);
 
         try {
             String metaStorePrincipal = null;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/entity/parser/EntityParser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/parser/EntityParser.java b/common/src/main/java/org/apache/falcon/entity/parser/EntityParser.java
index 0df831d..8da5139 100644
--- a/common/src/main/java/org/apache/falcon/entity/parser/EntityParser.java
+++ b/common/src/main/java/org/apache/falcon/entity/parser/EntityParser.java
@@ -24,7 +24,8 @@ import org.apache.falcon.Pair;
 import org.apache.falcon.entity.store.ConfigurationStore;
 import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.xml.bind.Unmarshaller;
 import java.io.ByteArrayInputStream;
@@ -39,7 +40,7 @@ import java.util.List;
  */
 public abstract class EntityParser<T extends Entity> {
 
-    private static final Logger LOG = Logger.getLogger(EntityParser.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EntityParser.class);
 
     private final EntityType entityType;
 
@@ -80,7 +81,7 @@ public abstract class EntityParser<T extends Entity> {
             // parse against schema
             Unmarshaller unmarshaller = entityType.getUnmarshaller();
             T entity = (T) unmarshaller.unmarshal(xmlStream);
-            LOG.info("Parsed Entity: " + entity.getName());
+            LOG.info("Parsed Entity: {}", entity.getName());
             return entity;
         } catch (Exception e) {
             throw new FalconException(e);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java b/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
index 8911b33..ccdead9 100644
--- a/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
+++ b/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
@@ -41,7 +41,8 @@ import org.apache.falcon.expression.ExpressionHelper;
 import org.apache.falcon.group.FeedGroup;
 import org.apache.falcon.group.FeedGroupMap;
 import org.apache.falcon.security.SecurityUtil;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Date;
 import java.util.HashSet;
@@ -53,7 +54,7 @@ import java.util.TimeZone;
  */
 public class FeedEntityParser extends EntityParser<Feed> {
 
-    private static final Logger LOG = Logger.getLogger(FeedEntityParser.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FeedEntityParser.class);
 
     public FeedEntityParser() {
         super(EntityType.FEED);
@@ -178,8 +179,8 @@ public class FeedEntityParser extends EntityParser<Feed> {
                     CrossEntityValidations.validateInstance(process, output, newFeed);
                 }
             }
-            LOG.debug("Verified and found " + process.getName() + " to be valid for new definition of "
-                    + newFeed.getName());
+            LOG.debug("Verified and found {} to be valid for new definition of {}",
+                    process.getName(), newFeed.getName());
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java b/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java
index c2f1d44..0534cc4 100644
--- a/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java
+++ b/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java
@@ -31,7 +31,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.xml.bind.JAXBException;
 import java.io.IOException;
@@ -52,8 +53,8 @@ import java.util.concurrent.ConcurrentHashMap;
  */
 public final class ConfigurationStore implements FalconService {
 
-    private static final Logger LOG = Logger.getLogger(ConfigurationStore.class);
-    private static final Logger AUDIT = Logger.getLogger("AUDIT");
+    private static final Logger LOG = LoggerFactory.getLogger(ConfigurationStore.class);
+    private static final Logger AUDIT = LoggerFactory.getLogger("AUDIT");
     private static final String UTF_8 = "UTF-8";
 
     private static final ConfigurationStore STORE = new ConfigurationStore();
@@ -98,7 +99,7 @@ public final class ConfigurationStore implements FalconService {
         try {
             FileSystem fileSystem = HadoopClientFactory.get().createFileSystem(storePath.toUri());
             if (!fileSystem.exists(storePath)) {
-                LOG.info("Creating configuration store directory: " + storePath);
+                LOG.info("Creating configuration store directory: {}", storePath);
                 fileSystem.mkdirs(storePath);
                 // set permissions so config store dir is owned by falcon alone
                 FsPermission permission = new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);
@@ -311,9 +312,9 @@ public final class ConfigurationStore implements FalconService {
                         type + Path.SEPARATOR + URLEncoder.encode(entity.getName(), UTF_8) + ".xml"));
         try {
             type.getMarshaller().marshal(entity, out);
-            LOG.info("Persisted configuration " + type + "/" + entity.getName());
+            LOG.info("Persisted configuration {}/{}", type, entity.getName());
         } catch (JAXBException e) {
-            LOG.error(e);
+            LOG.error("Unable to serialize the entity object {}/{}", type, entity.getName(), e);
             throw new StoreAccessException("Unable to serialize the entity object " + type + "/" + entity.getName(), e);
         } finally {
             out.close();
@@ -332,7 +333,7 @@ public final class ConfigurationStore implements FalconService {
         fs.mkdirs(archivePath);
         fs.rename(new Path(storePath, type + Path.SEPARATOR + URLEncoder.encode(name, UTF_8) + ".xml"),
                 new Path(archivePath, URLEncoder.encode(name, UTF_8) + "." + System.currentTimeMillis()));
-        LOG.info("Archived configuration " + type + "/" + name);
+        LOG.info("Archived configuration {}/{}", type, name);
     }
 
     /**
@@ -354,7 +355,7 @@ public final class ConfigurationStore implements FalconService {
             throw new StoreAccessException("Unable to un-marshall xml definition for " + type + "/" + name, e);
         } finally {
             in.close();
-            LOG.info("Restored configuration " + type + "/" + name);
+            LOG.info("Restored configuration {}/{}", type, name);
         }
     }
 
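In the ConfigurationStore hunk above, the bare LOG.error(e) call could not be carried over as-is: the SLF4J Logger interface has no error(Throwable) overload, so the call now supplies a message, and a Throwable passed as the last argument after the placeholder values is still recognised and logged with its full stack trace. A one-line sketch of the idiom, using the same variables as the surrounding code:

    // the two {} placeholders consume 'type' and the entity name; the trailing 'e'
    // is treated by SLF4J as the Throwable and its stack trace is logged
    LOG.error("Unable to serialize the entity object {}/{}", type, entity.getName(), e);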

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/entity/v0/EntityGraph.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/v0/EntityGraph.java b/common/src/main/java/org/apache/falcon/entity/v0/EntityGraph.java
index 75cab5d..444e28d 100644
--- a/common/src/main/java/org/apache/falcon/entity/v0/EntityGraph.java
+++ b/common/src/main/java/org/apache/falcon/entity/v0/EntityGraph.java
@@ -26,7 +26,8 @@ import org.apache.falcon.entity.v0.process.Input;
 import org.apache.falcon.entity.v0.process.Output;
 import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.service.ConfigurationChangeListener;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.HashMap;
 import java.util.HashSet;
@@ -39,7 +40,7 @@ import java.util.concurrent.ConcurrentHashMap;
  */
 public final class EntityGraph implements ConfigurationChangeListener {
 
-    private static final Logger LOG = Logger.getLogger(EntityGraph.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EntityGraph.class);
 
     private static EntityGraph instance = new EntityGraph();
 
@@ -83,7 +84,7 @@ public final class EntityGraph implements ConfigurationChangeListener {
         if (nodeEdges == null) {
             return;
         }
-        LOG.trace("Adding edges for " + entity.getName() + ": " + nodeEdges);
+        LOG.trace("Adding edges for {}: {}", entity.getName(), nodeEdges);
 
         for (Map.Entry<Node, Set<Node>> entry : nodeEdges.entrySet()) {
             if (graph.containsKey(entry.getKey())) {
@@ -92,7 +93,7 @@ public final class EntityGraph implements ConfigurationChangeListener {
                 graph.put(entry.getKey(), entry.getValue());
             }
         }
-        LOG.trace("Merged edges to graph " + entity.getName());
+        LOG.trace("Merged edges to graph {}", entity.getName());
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/metadata/EntityRelationshipGraphBuilder.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/metadata/EntityRelationshipGraphBuilder.java b/common/src/main/java/org/apache/falcon/metadata/EntityRelationshipGraphBuilder.java
index 2d3ec95..2f46ff4 100644
--- a/common/src/main/java/org/apache/falcon/metadata/EntityRelationshipGraphBuilder.java
+++ b/common/src/main/java/org/apache/falcon/metadata/EntityRelationshipGraphBuilder.java
@@ -29,7 +29,8 @@ import org.apache.falcon.entity.v0.process.Output;
 import org.apache.falcon.entity.v0.process.Outputs;
 import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.entity.v0.process.Workflow;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -39,7 +40,7 @@ import java.util.List;
  */
 public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
 
-    private static final Logger LOG = Logger.getLogger(EntityRelationshipGraphBuilder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EntityRelationshipGraphBuilder.class);
 
 
     public EntityRelationshipGraphBuilder(Graph graph, boolean preserveHistory) {
@@ -47,7 +48,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
     }
 
     public void addClusterEntity(Cluster clusterEntity) {
-        LOG.info("Adding cluster entity: " + clusterEntity.getName());
+        LOG.info("Adding cluster entity: {}", clusterEntity.getName());
         Vertex clusterVertex = addVertex(clusterEntity.getName(), RelationshipType.CLUSTER_ENTITY);
 
         addColoRelation(clusterEntity.getColo(), clusterVertex);
@@ -55,7 +56,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
     }
 
     public void addFeedEntity(Feed feed) {
-        LOG.info("Adding feed entity: " + feed.getName());
+        LOG.info("Adding feed entity: {}", feed.getName());
         Vertex feedVertex = addVertex(feed.getName(), RelationshipType.FEED_ENTITY);
 
         addUserRelation(feedVertex);
@@ -68,11 +69,11 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
     }
 
     public void updateFeedEntity(Feed oldFeed, Feed newFeed) {
-        LOG.info("Updating feed entity: " + newFeed.getName());
+        LOG.info("Updating feed entity: {}", newFeed.getName());
         Vertex feedEntityVertex = findVertex(oldFeed.getName(), RelationshipType.FEED_ENTITY);
         if (feedEntityVertex == null) {
             // todo - throw new IllegalStateException(oldFeed.getName() + " entity vertex must exist.");
-            LOG.error("Illegal State: Feed entity vertex must exist for " + oldFeed.getName());
+            LOG.error("Illegal State: Feed entity vertex must exist for {}", oldFeed.getName());
             return;
         }
 
@@ -84,7 +85,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
 
     public void addProcessEntity(Process process) {
         String processName = process.getName();
-        LOG.info("Adding process entity: " + processName);
+        LOG.info("Adding process entity: {}", processName);
         Vertex processVertex = addVertex(processName, RelationshipType.PROCESS_ENTITY);
         addWorkflowProperties(process.getWorkflow(), processVertex, processName);
 
@@ -100,11 +101,11 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
     }
 
     public void updateProcessEntity(Process oldProcess, Process newProcess) {
-        LOG.info("Updating process entity: " + newProcess.getName());
+        LOG.info("Updating process entity: {}", newProcess.getName());
         Vertex processEntityVertex = findVertex(oldProcess.getName(), RelationshipType.PROCESS_ENTITY);
         if (processEntityVertex == null) {
             // todo - throw new IllegalStateException(oldProcess.getName() + " entity vertex must exist");
-            LOG.error("Illegal State: Process entity vertex must exist for " + oldProcess.getName());
+            LOG.error("Illegal State: Process entity vertex must exist for {}", oldProcess.getName());
             return;
         }
 
@@ -126,7 +127,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
         Vertex clusterVertex = findVertex(clusterName, RelationshipType.CLUSTER_ENTITY);
         if (clusterVertex == null) { // cluster must exist before adding other entities
             // todo - throw new IllegalStateException("Cluster entity vertex must exist: " + clusterName);
-            LOG.error("Illegal State: Cluster entity vertex must exist for " + clusterName);
+            LOG.error("Illegal State: Cluster entity vertex must exist for {}", clusterName);
             return;
         }
 
@@ -157,7 +158,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
         Vertex feedVertex = findVertex(feedName, RelationshipType.FEED_ENTITY);
         if (feedVertex == null) {
             // todo - throw new IllegalStateException("Feed entity vertex must exist: " + feedName);
-            LOG.error("Illegal State: Feed entity vertex must exist for " + feedName);
+            LOG.error("Illegal State: Feed entity vertex must exist for {}", feedName);
             return;
         }
 
@@ -177,7 +178,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
             return;
         }
 
-        LOG.info("Updating workflow properties for: " + processEntityVertex);
+        LOG.info("Updating workflow properties for: {}", processEntityVertex);
         addWorkflowProperties(newWorkflow, processEntityVertex, processName);
     }
 
@@ -372,7 +373,7 @@ public class EntityRelationshipGraphBuilder extends RelationshipGraphBuilder {
         Vertex feedVertex = findVertex(feedName, RelationshipType.FEED_ENTITY);
         if (feedVertex == null) {
             // todo - throw new IllegalStateException("Feed entity vertex must exist: " + feedName);
-            LOG.error("Illegal State: Feed entity vertex must exist for " + feedName);
+            LOG.error("Illegal State: Feed entity vertex must exist for {}", feedName);
             return;
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/metadata/GraphUtils.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/metadata/GraphUtils.java b/common/src/main/java/org/apache/falcon/metadata/GraphUtils.java
index 24bf30f..8bec02f 100644
--- a/common/src/main/java/org/apache/falcon/metadata/GraphUtils.java
+++ b/common/src/main/java/org/apache/falcon/metadata/GraphUtils.java
@@ -23,7 +23,8 @@ import com.tinkerpop.blueprints.Edge;
 import com.tinkerpop.blueprints.Graph;
 import com.tinkerpop.blueprints.Vertex;
 import com.tinkerpop.blueprints.util.io.graphson.GraphSONWriter;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.io.OutputStream;
@@ -33,18 +34,18 @@ import java.io.OutputStream;
  */
 public final class GraphUtils {
 
-    private static final Logger LOG = Logger.getLogger(GraphUtils.class);
+    private static final Logger LOG = LoggerFactory.getLogger(GraphUtils.class);
 
     private GraphUtils() {
     }
 
     public static void dumpToLog(final Graph graph) {
-        LOG.debug("Vertices of " + graph);
+        LOG.debug("Vertices of {}", graph);
         for (Vertex vertex : graph.getVertices()) {
             LOG.debug(vertexString(vertex));
         }
 
-        LOG.debug("Edges of " + graph);
+        LOG.debug("Edges of {}", graph);
         for (Edge edge : graph.getEdges()) {
             LOG.debug(edgeString(edge));
         }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/metadata/InstanceRelationshipGraphBuilder.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/metadata/InstanceRelationshipGraphBuilder.java b/common/src/main/java/org/apache/falcon/metadata/InstanceRelationshipGraphBuilder.java
index eb591c0..0bbfd08 100644
--- a/common/src/main/java/org/apache/falcon/metadata/InstanceRelationshipGraphBuilder.java
+++ b/common/src/main/java/org/apache/falcon/metadata/InstanceRelationshipGraphBuilder.java
@@ -32,7 +32,8 @@ import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.feed.LocationType;
 import org.apache.falcon.entity.v0.process.Process;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.net.URISyntaxException;
 import java.util.Map;
@@ -42,7 +43,7 @@ import java.util.Map;
  */
 public class InstanceRelationshipGraphBuilder extends RelationshipGraphBuilder {
 
-    private static final Logger LOG = Logger.getLogger(InstanceRelationshipGraphBuilder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(InstanceRelationshipGraphBuilder.class);
 
     private static final String PROCESS_INSTANCE_FORMAT = "yyyy-MM-dd-HH-mm"; // nominal time
     private static final String FEED_INSTANCE_FORMAT = "yyyyMMddHHmm"; // computed
@@ -67,7 +68,7 @@ public class InstanceRelationshipGraphBuilder extends RelationshipGraphBuilder {
         String entityName = lineageMetadata.get(LineageArgs.ENTITY_NAME.getOptionName());
         String processInstanceName = getProcessInstanceName(entityName,
                 lineageMetadata.get(LineageArgs.NOMINAL_TIME.getOptionName()));
-        LOG.info("Adding process instance: " + processInstanceName);
+        LOG.info("Adding process instance: {}", processInstanceName);
 
         String timestamp = getTimestamp(lineageMetadata);
         Vertex processInstance = addVertex(processInstanceName, RelationshipType.PROCESS_INSTANCE, timestamp);
@@ -111,10 +112,10 @@ public class InstanceRelationshipGraphBuilder extends RelationshipGraphBuilder {
     public void addInstanceToEntity(Vertex instanceVertex, String entityName,
                                     RelationshipType entityType, RelationshipLabel edgeLabel) {
         Vertex entityVertex = findVertex(entityName, entityType);
-        LOG.info("Vertex exists? name=" + entityName + ", type=" + entityType + ", v=" + entityVertex);
+        LOG.info("Vertex exists? name={}, type={}, v={}", entityName, entityType, entityVertex);
         if (entityVertex == null) {
             // todo - throw new IllegalStateException(entityType + " entity vertex must exist " + entityName);
-            LOG.error("Illegal State: " + entityType + " vertex must exist for " + entityName);
+            LOG.error("Illegal State: {} vertex must exist for {}", entityType, entityName);
             return;
         }
 
@@ -161,10 +162,10 @@ public class InstanceRelationshipGraphBuilder extends RelationshipGraphBuilder {
             String feedName = feedNames[index];
             String feedInstancePath = feedInstancePaths[index];
 
-            LOG.info("Computing feed instance for : name=" + feedName + ", path= "
-                    + feedInstancePath + ", in cluster: " + clusterName);
+            LOG.info("Computing feed instance for : name={}, path={}, in cluster: {}",
+                    feedName, feedInstancePath, clusterName);
             String feedInstanceName = getFeedInstanceName(feedName, clusterName, feedInstancePath);
-            LOG.info("Adding feed instance: " + feedInstanceName);
+            LOG.info("Adding feed instance: {}", feedInstanceName);
             Vertex feedInstance = addVertex(feedInstanceName, RelationshipType.FEED_INSTANCE,
                     getTimestamp(lineageMetadata));
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/metadata/LineageRecorder.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/metadata/LineageRecorder.java b/common/src/main/java/org/apache/falcon/metadata/LineageRecorder.java
index 9f6965a..8a946ad 100644
--- a/common/src/main/java/org/apache/falcon/metadata/LineageRecorder.java
+++ b/common/src/main/java/org/apache/falcon/metadata/LineageRecorder.java
@@ -32,8 +32,9 @@ import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Logger;
 import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -47,7 +48,7 @@ import java.util.Map;
  */
 public class LineageRecorder  extends Configured implements Tool {
 
-    private static final Logger LOG = Logger.getLogger(LineageRecorder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LineageRecorder.class);
 
     public static void main(String[] args) throws Exception {
         ToolRunner.run(new LineageRecorder(), args);
@@ -57,15 +58,15 @@ public class LineageRecorder  extends Configured implements Tool {
     public int run(String[] arguments) throws Exception {
         CommandLine command = getCommand(arguments);
 
-        LOG.info("Parsing lineage metadata from: " + command);
+        LOG.info("Parsing lineage metadata from: {}", command);
         Map<String, String> lineageMetadata = getLineageMetadata(command);
-        LOG.info("Lineage Metadata: " + lineageMetadata);
+        LOG.info("Lineage Metadata: {}", lineageMetadata);
 
         String lineageFile = getFilePath(command.getOptionValue(LineageArgs.LOG_DIR.getOptionName()),
                 command.getOptionValue(LineageArgs.ENTITY_NAME.getOptionName())
         );
 
-        LOG.info("Persisting lineage metadata to: " + lineageFile);
+        LOG.info("Persisting lineage metadata to: {}", lineageFile);
         persistLineageMetadata(lineageMetadata, lineageFile);
 
         return 0;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/metadata/MetadataMappingService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/metadata/MetadataMappingService.java b/common/src/main/java/org/apache/falcon/metadata/MetadataMappingService.java
index 21c22de..5df4611 100644
--- a/common/src/main/java/org/apache/falcon/metadata/MetadataMappingService.java
+++ b/common/src/main/java/org/apache/falcon/metadata/MetadataMappingService.java
@@ -37,7 +37,8 @@ import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.service.ConfigurationChangeListener;
 import org.apache.falcon.service.FalconService;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Map;
 import java.util.Properties;
@@ -48,7 +49,7 @@ import java.util.Set;
  */
 public class MetadataMappingService implements FalconService, ConfigurationChangeListener {
 
-    private static final Logger LOG = Logger.getLogger(MetadataMappingService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(MetadataMappingService.class);
 
     /**
      * Constance for the service name.
@@ -77,13 +78,13 @@ public class MetadataMappingService implements FalconService, ConfigurationChang
         graph = initializeGraphDB();
         createIndicesForVertexKeys();
         // todo - create Edge Cardinality Constraints
-        LOG.info("Initialized graph db: " + graph);
+        LOG.info("Initialized graph db: {}", graph);
 
         vertexIndexedKeys = getIndexableGraph().getIndexedKeys(Vertex.class);
-        LOG.info("Init vertex property keys: " + vertexIndexedKeys);
+        LOG.info("Init vertex property keys: {}", vertexIndexedKeys);
 
         edgeIndexedKeys = getIndexableGraph().getIndexedKeys(Edge.class);
-        LOG.info("Init edge property keys: " + edgeIndexedKeys);
+        LOG.info("Init edge property keys: {}", edgeIndexedKeys);
 
         boolean preserveHistory = Boolean.valueOf(StartupProperties.get().getProperty(
                 "falcon.graph.preserve.history", "false"));
@@ -185,7 +186,7 @@ public class MetadataMappingService implements FalconService, ConfigurationChang
     @Override
     public void onAdd(Entity entity) throws FalconException {
         EntityType entityType = entity.getEntityType();
-        LOG.info("Adding lineage for entity: " + entity.getName() + ", type: " + entityType);
+        LOG.info("Adding lineage for entity: {}, type: {}", entity.getName(), entityType);
 
         switch (entityType) {
         case CLUSTER:
@@ -216,7 +217,7 @@ public class MetadataMappingService implements FalconService, ConfigurationChang
     @Override
     public void onChange(Entity oldEntity, Entity newEntity) throws FalconException {
         EntityType entityType = newEntity.getEntityType();
-        LOG.info("Updating lineage for entity: " + newEntity.getName() + ", type: " + entityType);
+        LOG.info("Updating lineage for entity: {}, type: {}", newEntity.getName(), entityType);
 
         switch (entityType) {
         case CLUSTER:
@@ -254,12 +255,12 @@ public class MetadataMappingService implements FalconService, ConfigurationChang
                                                String logDir) throws FalconException {
         String lineageFile = LineageRecorder.getFilePath(logDir, entityName);
 
-        LOG.info("Parsing lineage metadata from: " + lineageFile);
+        LOG.info("Parsing lineage metadata from: {}", lineageFile);
         Map<String, String> lineageMetadata = LineageRecorder.parseLineageMetadata(lineageFile);
 
         EntityOperations entityOperation = EntityOperations.valueOf(operation);
 
-        LOG.info("Adding lineage for entity: " + entityName + ", operation: " + operation);
+        LOG.info("Adding lineage for entity: {}, operation: {}", entityName, operation);
         switch (entityOperation) {
         case GENERATE:
             onProcessInstanceAdded(lineageMetadata);
@@ -285,12 +286,12 @@ public class MetadataMappingService implements FalconService, ConfigurationChang
     }
 
     private void onFeedInstanceReplicated(Map<String, String> lineageMetadata) {
-        LOG.info("Adding replicated feed instance: " + lineageMetadata.get(LineageArgs.NOMINAL_TIME.getOptionName()));
+        LOG.info("Adding replicated feed instance: {}", lineageMetadata.get(LineageArgs.NOMINAL_TIME.getOptionName()));
         // todo - tbd
     }
 
     private void onFeedInstanceEvicted(Map<String, String> lineageMetadata) {
-        LOG.info("Adding evicted feed instance: " + lineageMetadata.get(LineageArgs.NOMINAL_TIME.getOptionName()));
+        LOG.info("Adding evicted feed instance: {}", lineageMetadata.get(LineageArgs.NOMINAL_TIME.getOptionName()));
         // todo - tbd
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/metadata/RelationshipGraphBuilder.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/metadata/RelationshipGraphBuilder.java b/common/src/main/java/org/apache/falcon/metadata/RelationshipGraphBuilder.java
index 7baeeec..9ee0ea6 100644
--- a/common/src/main/java/org/apache/falcon/metadata/RelationshipGraphBuilder.java
+++ b/common/src/main/java/org/apache/falcon/metadata/RelationshipGraphBuilder.java
@@ -25,7 +25,8 @@ import com.tinkerpop.blueprints.GraphQuery;
 import com.tinkerpop.blueprints.Vertex;
 import org.apache.falcon.entity.v0.SchemaHelper;
 import org.apache.falcon.security.CurrentUser;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Date;
 import java.util.Iterator;
@@ -36,7 +37,7 @@ import java.util.Map;
  */
 public abstract class RelationshipGraphBuilder {
 
-    private static final Logger LOG = Logger.getLogger(RelationshipGraphBuilder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(RelationshipGraphBuilder.class);
 
     /**
      * A blueprints graph.
@@ -65,10 +66,7 @@ public abstract class RelationshipGraphBuilder {
     public Vertex addVertex(String name, RelationshipType type) {
         Vertex vertex = findVertex(name, type);
         if (vertex != null) {
-            if (LOG.isDebugEnabled()) {
-                LOG.debug("Found an existing vertex for: name=" + name + ", type=" + type);
-            }
-
+            LOG.debug("Found an existing vertex for: name={}, type={}", name, type);
             return vertex;
         }
 
@@ -78,10 +76,7 @@ public abstract class RelationshipGraphBuilder {
     protected Vertex addVertex(String name, RelationshipType type, String timestamp) {
         Vertex vertex = findVertex(name, type);
         if (vertex != null) {
-            if (LOG.isDebugEnabled()) {
-                LOG.debug("Found an existing vertex for: name=" + name + ", type=" + type);
-            }
-
+            LOG.debug("Found an existing vertex for: name={}, type={}", name, type);
             return vertex;
         }
 
@@ -89,9 +84,7 @@ public abstract class RelationshipGraphBuilder {
     }
 
     protected Vertex findVertex(String name, RelationshipType type) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("Finding vertex for: name=" + name + ", type=" + type);
-        }
+        LOG.debug("Finding vertex for: name={}, type={}", name, type);
 
         GraphQuery query = graph.query()
                 .has(RelationshipProperty.NAME.getName(), name)
@@ -105,9 +98,7 @@ public abstract class RelationshipGraphBuilder {
     }
 
     protected Vertex createVertex(String name, RelationshipType type, String timestamp) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("Creating a new vertex for: name=" + name + ", type=" + type);
-        }
+        LOG.debug("Creating a new vertex for: name={}, type={}", name, type);
 
         Vertex vertex = graph.addVertex(null);
         vertex.setProperty(RelationshipProperty.NAME.getName(), name);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java b/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java
index 264d5b8..fbed283 100644
--- a/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java
+++ b/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java
@@ -25,7 +25,8 @@ import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.util.Properties;
@@ -37,7 +38,7 @@ import java.util.Properties;
  */
 public class AuthenticationInitializationService implements FalconService {
 
-    private static final Logger LOG = Logger.getLogger(AuthenticationInitializationService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AuthenticationInitializationService.class);
 
     /**
      * Constant for the configuration property that indicates the prefix.
@@ -93,8 +94,7 @@ public class AuthenticationInitializationService implements FalconService {
             UserGroupInformation.setConfiguration(conf);
             UserGroupInformation.loginUserFromKeytab(principal, keytabFilePath);
 
-            LOG.info("Got Kerberos ticket, keytab: " + keytabFilePath
-                    + ", Falcon principal principal: " + principal);
+            LOG.info("Got Kerberos ticket, keytab: {}, Falcon principal: {}", keytabFilePath, principal);
         } catch (Exception ex) {
             throw new FalconException("Could not initialize " + getName()
                     + ": " + ex.getMessage(), ex);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/security/CurrentUser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/security/CurrentUser.java b/common/src/main/java/org/apache/falcon/security/CurrentUser.java
index cd7d0b0..6fccd1b 100644
--- a/common/src/main/java/org/apache/falcon/security/CurrentUser.java
+++ b/common/src/main/java/org/apache/falcon/security/CurrentUser.java
@@ -18,7 +18,8 @@
 
 package org.apache.falcon.security;
 
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.security.auth.Subject;
 
@@ -27,7 +28,7 @@ import javax.security.auth.Subject;
  */
 public final class CurrentUser {
 
-    private static final Logger LOG = Logger.getLogger(CurrentUser.class);
+    private static final Logger LOG = LoggerFactory.getLogger(CurrentUser.class);
 
     private static final CurrentUser INSTANCE = new CurrentUser();
 
@@ -49,7 +50,7 @@ public final class CurrentUser {
 
         Subject subject = new Subject();
         subject.getPrincipals().add(new FalconPrincipal(user));
-        LOG.info("Logging in " + user);
+        LOG.info("Logging in {}", user);
         INSTANCE.currentSubject.set(subject);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/service/LogCleanupService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/service/LogCleanupService.java b/common/src/main/java/org/apache/falcon/service/LogCleanupService.java
index 689c8a0..a32eaa7 100644
--- a/common/src/main/java/org/apache/falcon/service/LogCleanupService.java
+++ b/common/src/main/java/org/apache/falcon/service/LogCleanupService.java
@@ -32,14 +32,15 @@ import org.apache.falcon.cleanup.FeedCleanupHandler;
 import org.apache.falcon.cleanup.ProcessCleanupHandler;
 import org.apache.falcon.expression.ExpressionHelper;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Log cleanup service.
  */
 public class LogCleanupService implements FalconService {
 
-    private static final Logger LOG = Logger.getLogger(LogCleanupService.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LogCleanupService.class);
     private final ExpressionEvaluator evaluator = new ExpressionEvaluatorImpl();
     private final ExpressionHelper resolver = ExpressionHelper.get();
 
@@ -64,7 +65,7 @@ public class LogCleanupService implements FalconService {
         @Override
         public void run() {
             try {
-                LOG.info("Cleaning up logs at: " + new Date());
+                LOG.info("Cleaning up logs at: {}", new Date());
                 processCleanupHandler.cleanup();
                 feedCleanupHandler.cleanup();
             } catch (Throwable t) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/service/ServiceInitializer.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/service/ServiceInitializer.java b/common/src/main/java/org/apache/falcon/service/ServiceInitializer.java
index 5e0256e..4708b94 100644
--- a/common/src/main/java/org/apache/falcon/service/ServiceInitializer.java
+++ b/common/src/main/java/org/apache/falcon/service/ServiceInitializer.java
@@ -21,14 +21,15 @@ package org.apache.falcon.service;
 import org.apache.falcon.FalconException;
 import org.apache.falcon.util.ReflectionUtils;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Initializer that Falcon uses at startup to bring up all the falcon startup services.
  */
 public class ServiceInitializer {
 
-    private static final Logger LOG = Logger.getLogger(ServiceInitializer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ServiceInitializer.class);
     private final Services services = Services.get();
 
     public void initialize() throws FalconException {
@@ -41,27 +42,27 @@ public class ServiceInitializer {
             }
             FalconService service = ReflectionUtils.getInstanceByClassName(serviceClassName);
             services.register(service);
-            LOG.info("Initializing service : " + serviceClassName);
+            LOG.info("Initializing service: {}", serviceClassName);
             try {
                 service.init();
             } catch (Throwable t) {
-                LOG.fatal("Failed to initialize service " + serviceClassName, t);
+                LOG.error("Failed to initialize service {}", serviceClassName, t);
                 throw new FalconException(t);
             }
-            LOG.info("Service initialized : " + serviceClassName);
+            LOG.info("Service initialized: {}", serviceClassName);
         }
     }
 
     public void destroy() throws FalconException {
         for (FalconService service : services) {
-            LOG.info("Destroying service : " + service.getClass().getName());
+            LOG.info("Destroying service: {}", service.getClass().getName());
             try {
                 service.destroy();
             } catch (Throwable t) {
-                LOG.fatal("Failed to destroy service " + service.getClass().getName(), t);
+                LOG.error("Failed to destroy service {}", service.getClass().getName(), t);
                 throw new FalconException(t);
             }
-            LOG.info("Service destroyed : " + service.getClass().getName());
+            LOG.info("Service destroyed: {}", service.getClass().getName());
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/update/UpdateHelper.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/update/UpdateHelper.java b/common/src/main/java/org/apache/falcon/update/UpdateHelper.java
index dda6bb3..7af77d0 100644
--- a/common/src/main/java/org/apache/falcon/update/UpdateHelper.java
+++ b/common/src/main/java/org/apache/falcon/update/UpdateHelper.java
@@ -36,7 +36,8 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -48,7 +49,7 @@ import java.util.Map;
  * Helper methods to facilitate entity updates.
  */
 public final class UpdateHelper {
-    private static final Logger LOG = Logger.getLogger(UpdateHelper.class);
+    private static final Logger LOG = LoggerFactory.getLogger(UpdateHelper.class);
 
     private static final String[] FEED_FIELDS = new String[]{"partitions", "groups", "lateArrival.cutOff",
                                                              "schema.location", "schema.provider",
@@ -153,7 +154,7 @@ public final class UpdateHelper {
                 if (dest != null) {
                     Path target = new Path(dest, src.getName());
                     FileUtil.copy(fs, src, fs, target, false, conf);
-                    LOG.debug("Copied " + src + " to " + target);
+                    LOG.debug("Copied {} to {}", src, target);
                 }
             } else {
                 FileStatus[] files = fs.listStatus(src);
@@ -184,17 +185,17 @@ public final class UpdateHelper {
 
             //check if affectedProcess is defined for this cluster
             if (ProcessHelper.getCluster(affectedProcess, cluster) == null) {
-                LOG.debug("Process " + affectedProcess.getName() + " is not defined for cluster " + cluster);
+                LOG.debug("Process {} is not defined for cluster {}", affectedProcess.getName(), cluster);
                 return false;
             }
 
             if (!oldFeed.getFrequency().equals(newFeed.getFrequency())) {
-                LOG.debug(oldFeed.toShortString() + ": Frequency has changed. Updating...");
+                LOG.debug("{}: Frequency has changed. Updating...", oldFeed.toShortString());
                 return true;
             }
 
             if (!StringUtils.equals(oldFeed.getAvailabilityFlag(), newFeed.getAvailabilityFlag())) {
-                LOG.debug(oldFeed.toShortString() + ": Availability flag has changed. Updating...");
+                LOG.debug("{}: Availability flag has changed. Updating...", oldFeed.toShortString());
                 return true;
             }
 
@@ -202,7 +203,7 @@ public final class UpdateHelper {
             Storage newFeedStorage = FeedHelper.createStorage(cluster, newFeed);
 
             if (!oldFeedStorage.isIdentical(newFeedStorage)) {
-                LOG.debug(oldFeed.toShortString() + ": Storage has changed. Updating...");
+                LOG.debug("{}: Storage has changed. Updating...", oldFeed.toShortString());
                 return true;
             }
             return false;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/util/ApplicationProperties.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/util/ApplicationProperties.java b/common/src/main/java/org/apache/falcon/util/ApplicationProperties.java
index d7fb10b..bd87e0d 100644
--- a/common/src/main/java/org/apache/falcon/util/ApplicationProperties.java
+++ b/common/src/main/java/org/apache/falcon/util/ApplicationProperties.java
@@ -21,7 +21,8 @@ package org.apache.falcon.util;
 import org.apache.commons.io.IOUtils;
 import org.apache.falcon.FalconException;
 import org.apache.falcon.expression.ExpressionHelper;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.FileInputStream;
@@ -38,7 +39,7 @@ import java.util.Set;
  */
 public abstract class ApplicationProperties extends Properties {
 
-    private static final Logger LOG = Logger.getLogger(ApplicationProperties.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ApplicationProperties.class);
 
     protected abstract String getPropertyFile();
 
@@ -103,7 +104,7 @@ public abstract class ApplicationProperties extends Properties {
         if (confDir != null) {
             File fileToLoad = new File(confDir, propertyFileName);
             if (fileToLoad.exists() && fileToLoad.isFile() && fileToLoad.canRead()) {
-                LOG.info("config.location is set, using: " + confDir + "/" + propertyFileName);
+                LOG.info("config.location is set, using: {}/{}", confDir, propertyFileName);
                 resourceAsStream = new FileInputStream(fileToLoad);
             }
         }
@@ -116,12 +117,12 @@ public abstract class ApplicationProperties extends Properties {
         Class clazz = ApplicationProperties.class;
         URL resource = clazz.getResource("/" + propertyFileName);
         if (resource != null) {
-            LOG.info("Fallback to classpath for: " + resource);
+            LOG.info("Fallback to classpath for: {}", resource);
             resourceAsStream = clazz.getResourceAsStream("/" + propertyFileName);
         } else {
             resource = clazz.getResource(propertyFileName);
             if (resource != null) {
-                LOG.info("Fallback to classpath for: " + resource);
+                LOG.info("Fallback to classpath for: {}", resource);
                 resourceAsStream = clazz.getResourceAsStream(propertyFileName);
             }
         }
@@ -140,13 +141,13 @@ public abstract class ApplicationProperties extends Properties {
             }
         }
 
-        LOG.info("Initializing " + this.getClass().getName() + " properties with domain " + domain);
+        LOG.info("Initializing {} properties with domain {}", this.getClass().getName(), domain);
         Set<String> keys = getKeys(origProps.keySet());
         for (String key : keys) {
             String value = origProps.getProperty(domain + "." + key, origProps.getProperty("*." + key));
             if (value != null) {
                 value = ExpressionHelper.substitute(value);
-                LOG.debug(key + "=" + value);
+                LOG.debug("{}={}", key, value);
                 put(key, value);
             }
         }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/util/DeploymentUtil.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/util/DeploymentUtil.java b/common/src/main/java/org/apache/falcon/util/DeploymentUtil.java
index eca2912..5d65073 100644
--- a/common/src/main/java/org/apache/falcon/util/DeploymentUtil.java
+++ b/common/src/main/java/org/apache/falcon/util/DeploymentUtil.java
@@ -19,7 +19,8 @@
 package org.apache.falcon.util;
 
 import org.apache.falcon.entity.ColoClusterRelation;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.HashSet;
 import java.util.Set;
@@ -28,7 +29,7 @@ import java.util.Set;
  * Helper methods to deployment properties.
  */
 public final class DeploymentUtil {
-    private static final Logger LOG = Logger.getLogger(DeploymentUtil.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DeploymentUtil.class);
 
     protected static final String DEFAULT_COLO = "default";
     protected static final String EMBEDDED = "embedded";
@@ -49,8 +50,8 @@ public final class DeploymentUtil {
             CURRENT_COLO = StartupProperties.get().
                     getProperty("current.colo", DEFAULT_COLO);
         }
-        LOG.info("Running in embedded mode? " + EMBEDDED_MODE);
-        LOG.info("Current colo: " + CURRENT_COLO);
+        LOG.info("Running in embedded mode? {}", EMBEDDED_MODE);
+        LOG.info("Current colo: {}", CURRENT_COLO);
     }
 
     private DeploymentUtil() {}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/main/java/org/apache/falcon/util/RuntimeProperties.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/util/RuntimeProperties.java b/common/src/main/java/org/apache/falcon/util/RuntimeProperties.java
index 6ec2f70..87b67d0 100644
--- a/common/src/main/java/org/apache/falcon/util/RuntimeProperties.java
+++ b/common/src/main/java/org/apache/falcon/util/RuntimeProperties.java
@@ -19,7 +19,8 @@
 package org.apache.falcon.util;
 
 import org.apache.falcon.FalconException;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Properties;
 import java.util.concurrent.atomic.AtomicReference;
@@ -29,7 +30,7 @@ import java.util.concurrent.atomic.AtomicReference;
  */
 public final class RuntimeProperties extends ApplicationProperties {
 
-    private static final Logger LOG = Logger.getLogger(RuntimeProperties.class);
+    private static final Logger LOG = LoggerFactory.getLogger(RuntimeProperties.class);
 
     private static final String PROPERTY_FILE = "runtime.properties";
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/common/src/test/java/org/apache/falcon/entity/store/ConfigurationStoreTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/entity/store/ConfigurationStoreTest.java b/common/src/test/java/org/apache/falcon/entity/store/ConfigurationStoreTest.java
index f466711..288fdfa 100644
--- a/common/src/test/java/org/apache/falcon/entity/store/ConfigurationStoreTest.java
+++ b/common/src/test/java/org/apache/falcon/entity/store/ConfigurationStoreTest.java
@@ -27,7 +27,8 @@ import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.testng.Assert;
 import org.testng.annotations.AfterSuite;
 import org.testng.annotations.BeforeSuite;
@@ -40,7 +41,7 @@ import java.io.IOException;
  */
 public class ConfigurationStoreTest {
 
-    private static final Logger LOG = Logger.getLogger(ConfigurationStoreTest.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ConfigurationStoreTest.class);
 
     private ConfigurationStore store = ConfigurationStore.get();
     private TestListener listener = new TestListener();
@@ -121,6 +122,6 @@ public class ConfigurationStoreTest {
                 getProperty("config.store.uri"));
         FileSystem fs = FileSystem.get(path.toUri(), new Configuration());
         fs.delete(path, true);
-        LOG.info("Cleaned up " + path);
+        LOG.info("Cleaned up {}", path);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/feed/src/main/java/org/apache/falcon/workflow/OozieFeedWorkflowBuilder.java
----------------------------------------------------------------------
diff --git a/feed/src/main/java/org/apache/falcon/workflow/OozieFeedWorkflowBuilder.java b/feed/src/main/java/org/apache/falcon/workflow/OozieFeedWorkflowBuilder.java
index 8d5df88..16bff02 100644
--- a/feed/src/main/java/org/apache/falcon/workflow/OozieFeedWorkflowBuilder.java
+++ b/feed/src/main/java/org/apache/falcon/workflow/OozieFeedWorkflowBuilder.java
@@ -52,7 +52,8 @@ import org.apache.falcon.util.RuntimeProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -69,7 +70,7 @@ import java.util.Properties;
  * Workflow definition builder for feed replication & retention.
  */
 public class OozieFeedWorkflowBuilder extends OozieWorkflowBuilder<Feed> {
-    private static final Logger LOG = Logger.getLogger(OozieFeedWorkflowBuilder.class);
+    private static final Logger LOG = LoggerFactory.getLogger(OozieFeedWorkflowBuilder.class);
 
     public OozieFeedWorkflowBuilder(Feed entity) {
         super(entity);
@@ -82,7 +83,7 @@ public class OozieFeedWorkflowBuilder extends OozieWorkflowBuilder<Feed> {
         for (String clusterName : clusters) {
             org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, clusterName);
             if (!feedCluster.getValidity().getStart().before(feedCluster.getValidity().getEnd())) {
-                LOG.info("feed validity start <= end for cluster " + clusterName + ". Skipping schedule");
+                LOG.info("feed validity start >= end for cluster {}. Skipping schedule", clusterName);
                 break;
             }
 
@@ -130,8 +131,8 @@ public class OozieFeedWorkflowBuilder extends OozieWorkflowBuilder<Feed> {
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(entity, cluster.getName());
 
         if (feedCluster.getValidity().getEnd().before(new Date())) {
-            LOG.warn("Feed Retention is not applicable as Feed's end time for cluster " + cluster.getName()
-                + " is not in the future");
+            LOG.warn("Feed Retention is not applicable as Feed's end time for cluster {} is not in the future",
+                    cluster.getName());
             return null;
         }
 
@@ -380,8 +381,8 @@ public class OozieFeedWorkflowBuilder extends OozieWorkflowBuilder<Feed> {
 
             if (noOverlapExists(sourceStartDate, sourceEndDate,
                 targetStartDate, targetEndDate)) {
-                LOG.warn("Not creating replication coordinator, as the source cluster:" + srcCluster.getName()
-                    + "and target cluster: " + trgCluster.getName() + " do not have overlapping dates");
+                LOG.warn("Not creating replication coordinator, as the source cluster: {} and target cluster: {} do "
+                    + "not have overlapping dates", srcCluster.getName(), trgCluster.getName());
                 return null;
             }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
----------------------------------------------------------------------
diff --git a/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java b/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
index 1468ac4..8dea54c 100644
--- a/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
+++ b/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java
@@ -21,7 +21,8 @@ package org.apache.falcon.listener;
 import org.apache.activemq.broker.BrokerService;
 import org.apache.falcon.JobTrackerService;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.ServletContextEvent;
 import javax.servlet.ServletContextListener;
@@ -30,7 +31,7 @@ import javax.servlet.ServletContextListener;
  * Listener for bootstrapping embedded hadoop cluster for integration tests.
  */
 public class HadoopStartupListener implements ServletContextListener {
-    private static final Logger LOG = Logger.getLogger(HadoopStartupListener.class);
+    private static final Logger LOG = LoggerFactory.getLogger(HadoopStartupListener.class);
     private BrokerService broker;
 
     @Override
@@ -109,7 +110,7 @@ public class HadoopStartupListener implements ServletContextListener {
             @Override
             public void run() {
                 try {
-                    LOG.info("Starting service " + service.getClass().getName());
+                    LOG.info("Starting service {}", service.getClass().getName());
                     invoke(service, method, null, null);
                 } catch(Exception e) {
                     throw new RuntimeException(e);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java
----------------------------------------------------------------------
diff --git a/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java b/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java
index d3c1a69..679e9ea 100644
--- a/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java
+++ b/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java
@@ -23,7 +23,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -41,7 +42,7 @@ import java.util.Map;
 public class EntityInstanceMessage {
 
     private final Map<ARG, String> keyValueMap = new LinkedHashMap<ARG, String>();
-    private static final Logger LOG = Logger.getLogger(EntityInstanceMessage.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EntityInstanceMessage.class);
     private static final String FALCON_ENTITY_TOPIC_NAME = "FALCON.ENTITY.TOPIC";
 
     /**
@@ -150,7 +151,7 @@ public class EntityInstanceMessage {
         try {
             feedPaths = getFeedPaths(cmd);
         } catch (IOException e) {
-            LOG.error("Error getting instance paths: ", e);
+            LOG.error("Error getting instance paths", e);
             throw new RuntimeException(e);
         }
 
@@ -197,13 +198,13 @@ public class EntityInstanceMessage {
         String operation = cmd.getOptionValue(ARG.operation.getArgName());
 
         if (topicName.equals(FALCON_ENTITY_TOPIC_NAME)) {
-            LOG.debug("Returning instance paths for Falcon Topic: "
-                    + cmd.getOptionValue(ARG.feedInstancePaths.getArgName()));
+            LOG.debug("Returning instance paths for Falcon Topic: {}",
+                    cmd.getOptionValue(ARG.feedInstancePaths.getArgName()));
             return new String[]{cmd.getOptionValue(ARG.feedInstancePaths.getArgName()), };
         }
 
         if (operation.equals(EntityOps.GENERATE.name()) || operation.equals(EntityOps.REPLICATE.name())) {
-            LOG.debug("Returning instance paths: " + cmd.getOptionValue(ARG.feedInstancePaths.getArgName()));
+            LOG.debug("Returning instance paths: {}", cmd.getOptionValue(ARG.feedInstancePaths.getArgName()));
             return cmd.getOptionValue(ARG.feedInstancePaths.getArgName()).split(",");
         }
         //else case of feed retention
@@ -220,12 +221,12 @@ public class EntityInstanceMessage {
         IOUtils.copyBytes(instance, writer, 4096, true);
         String[] instancePaths = writer.toString().split("=");
         fs.delete(logFile, true);
-        LOG.info("Deleted feed instance paths file:" + logFile);
+        LOG.info("Deleted feed instance paths file: {}", logFile);
         if (instancePaths.length == 1) {
             LOG.debug("Returning 0 instance paths for feed ");
             return new String[0];
         } else {
-            LOG.debug("Returning instance paths for feed " + instancePaths[1]);
+            LOG.debug("Returning instance paths for feed {}", instancePaths[1]);
             return instancePaths[1].split(",");
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java
----------------------------------------------------------------------
diff --git a/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java b/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java
index 2b9f573..ccac921 100644
--- a/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java
+++ b/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java
@@ -23,7 +23,8 @@ import org.apache.falcon.messaging.EntityInstanceMessage.ARG;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.jms.*;
 import java.lang.reflect.InvocationTargetException;
@@ -34,7 +35,7 @@ import java.lang.reflect.InvocationTargetException;
 public class MessageProducer extends Configured implements Tool {
 
     private Connection connection;
-    private static final Logger LOG = Logger.getLogger(MessageProducer.class);
+    private static final Logger LOG = LoggerFactory.getLogger(MessageProducer.class);
     private static final long DEFAULT_TTL = 3 * 24 * 60 * 60 * 1000;
 
     /**
@@ -59,8 +60,7 @@ public class MessageProducer extends Configured implements Tool {
                     .getBrokerTTL());
             messageTTL = messageTTLinMins * 60 * 1000;
         } catch (NumberFormatException e) {
-            LOG.error("Error in parsing broker.ttl, setting TTL to:"
-                    + DEFAULT_TTL + " milli-seconds");
+            LOG.error("Error in parsing broker.ttl, setting TTL to: {} milli-seconds", DEFAULT_TTL);
         }
         producer.setTimeToLive(messageTTL);
         producer.send(new EntityInstanceMessageCreator(entityInstanceMessage)
@@ -159,20 +159,20 @@ public class MessageProducer extends Configured implements Tool {
             createAndStartConnection(cmd.getOptionValue(ARG.brokerImplClass.name()), "",
                     "", cmd.getOptionValue(ARG.brokerUrl.name()));
             for (EntityInstanceMessage message : entityInstanceMessage) {
-                LOG.info("Sending message:" + message.getKeyValueMap());
+                LOG.info("Sending message: {}", message.getKeyValueMap());
                 sendMessage(message);
             }
         } catch (JMSException e) {
-            LOG.error("Error in getConnection:", e);
+            LOG.error("Error in getConnection", e);
         } catch (Exception e) {
-            LOG.error("Error in getConnection:", e);
+            LOG.error("Error in getConnection", e);
         } finally {
             try {
                 if (connection != null) {
                     connection.close();
                 }
             } catch (JMSException e) {
-                LOG.error("Error in closing connection:", e);
+                LOG.error("Error in closing connection", e);
             }
         }
         return 0;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/b036d740/metrics/pom.xml
----------------------------------------------------------------------
diff --git a/metrics/pom.xml b/metrics/pom.xml
index 2eb08ac..d4c9d03 100644
--- a/metrics/pom.xml
+++ b/metrics/pom.xml
@@ -51,8 +51,8 @@
         </dependency>
 
         <dependency>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
         </dependency>
     </dependencies>
 </project>